You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1192 lines
38KB

  1. /*
  2. * H.264 hardware encoding using nvidia nvenc
  3. * Copyright (c) 2014 Timo Rothenpieler <timo@rothenpieler.org>
  4. *
  5. * This file is part of FFmpeg.
  6. *
  7. * FFmpeg is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2.1 of the License, or (at your option) any later version.
  11. *
  12. * FFmpeg is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * Lesser General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with FFmpeg; if not, write to the Free Software
  19. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20. */
  21. #if defined(_WIN32)
  22. #include <windows.h>
  23. #else
  24. #include <dlfcn.h>
  25. #endif
  26. #include <nvEncodeAPI.h>
  27. #include "libavutil/internal.h"
  28. #include "libavutil/imgutils.h"
  29. #include "libavutil/avassert.h"
  30. #include "libavutil/opt.h"
  31. #include "libavutil/mem.h"
  32. #include "avcodec.h"
  33. #include "internal.h"
  34. #include "thread.h"
  35. #if defined(_WIN32)
  36. #define CUDAAPI __stdcall
  37. #else
  38. #define CUDAAPI
  39. #endif
  40. #if defined(_WIN32)
  41. #define LOAD_FUNC(l, s) GetProcAddress(l, s)
  42. #define DL_CLOSE_FUNC(l) FreeLibrary(l)
  43. #else
  44. #define LOAD_FUNC(l, s) dlsym(l, s)
  45. #define DL_CLOSE_FUNC(l) dlclose(l)
  46. #endif
  47. typedef enum cudaError_enum {
  48. CUDA_SUCCESS = 0
  49. } CUresult;
  50. typedef int CUdevice;
  51. typedef void* CUcontext;
  52. typedef CUresult(CUDAAPI *PCUINIT)(unsigned int Flags);
  53. typedef CUresult(CUDAAPI *PCUDEVICEGETCOUNT)(int *count);
  54. typedef CUresult(CUDAAPI *PCUDEVICEGET)(CUdevice *device, int ordinal);
  55. typedef CUresult(CUDAAPI *PCUDEVICEGETNAME)(char *name, int len, CUdevice dev);
  56. typedef CUresult(CUDAAPI *PCUDEVICECOMPUTECAPABILITY)(int *major, int *minor, CUdevice dev);
  57. typedef CUresult(CUDAAPI *PCUCTXCREATE)(CUcontext *pctx, unsigned int flags, CUdevice dev);
  58. typedef CUresult(CUDAAPI *PCUCTXPOPCURRENT)(CUcontext *pctx);
  59. typedef CUresult(CUDAAPI *PCUCTXDESTROY)(CUcontext ctx);
  60. typedef NVENCSTATUS (NVENCAPI* PNVENCODEAPICREATEINSTANCE)(NV_ENCODE_API_FUNCTION_LIST *functionList);
  61. #if NVENCAPI_MAJOR_VERSION < 5
  62. static const GUID dummy_license = { 0x0, 0x0, 0x0, { 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 } };
  63. #endif
/* One NVENC input buffer plus the bookkeeping needed to reuse it. */
typedef struct NvencInputSurface
{
    NV_ENC_INPUT_PTR input_surface; /* handle from nvEncCreateInputBuffer */
    int width;                      /* allocated width (rounded up to 32) */
    int height;                     /* allocated height (rounded up to 32) */
    int lockCount;                  /* non-zero while an in-flight frame owns this surface */
    NV_ENC_BUFFER_FORMAT format;    /* buffer format matching avctx->pix_fmt */
} NvencInputSurface;
/* One NVENC bitstream (output) buffer and the input surface it encodes. */
typedef struct NvencOutputSurface
{
    NV_ENC_OUTPUT_PTR output_surface;  /* handle from nvEncCreateBitstreamBuffer */
    int size;                          /* allocated bitstream buffer size in bytes */
    NvencInputSurface* input_surface;  /* paired input surface for the frame being encoded */
    int busy;                          /* non-zero while queued inside the encoder */
} NvencOutputSurface;
/* Queue element: either a timestamp or an output surface pointer,
 * depending on which queue it lives in. */
typedef struct NvencData
{
    union {
        int64_t timestamp;
        NvencOutputSurface *surface;
    };
} NvencData;
/* Growable ring buffer of NvencData; size is kept a power of two so
 * indices wrap with a simple mask. */
typedef struct NvencDataList
{
    NvencData* data;  /* backing array, lazily allocated */
    uint32_t pos;     /* next write position */
    uint32_t count;   /* number of queued elements */
    uint32_t size;    /* capacity, always a power of two (0 until first enqueue) */
} NvencDataList;
/* Dynamically resolved CUDA driver entry points, the NVENC function
 * table, and the handles of the libraries they were loaded from. */
typedef struct NvencDynLoadFunctions
{
    PCUINIT cu_init;
    PCUDEVICEGETCOUNT cu_device_get_count;
    PCUDEVICEGET cu_device_get;
    PCUDEVICEGETNAME cu_device_get_name;
    PCUDEVICECOMPUTECAPABILITY cu_device_compute_capability;
    PCUCTXCREATE cu_ctx_create;
    PCUCTXPOPCURRENT cu_ctx_pop_current;
    PCUCTXDESTROY cu_ctx_destroy;

    NV_ENCODE_API_FUNCTION_LIST nvenc_funcs; /* filled by NvEncodeAPICreateInstance */
    int nvenc_device_count;                  /* number of NVENC-capable GPUs found */
    CUdevice nvenc_devices[16];              /* devices with compute SM >= 3.0 */

#if defined(_WIN32)
    HMODULE cuda_lib;
    HMODULE nvenc_lib;
#else
    void* cuda_lib;
    void* nvenc_lib;
#endif
} NvencDynLoadFunctions;
/* Private codec context for the NVENC H.264 encoder. */
typedef struct NvencContext
{
    AVClass *avclass;

    NvencDynLoadFunctions nvenc_dload_funcs;

    NV_ENC_INITIALIZE_PARAMS init_encode_params;
    NV_ENC_CONFIG encode_config;
    CUcontext cu_context;

    int max_surface_count;               /* number of entries in both surface pools */
    NvencInputSurface *input_surfaces;
    NvencOutputSurface *output_surfaces;

    NvencDataList output_surface_queue;       /* surfaces submitted, awaiting output */
    NvencDataList output_surface_ready_queue; /* surfaces whose bitstream is ready */
    NvencDataList timestamp_list;             /* input pts values, for dts generation */
    int64_t last_dts;                         /* last emitted dts, used to keep dts monotonic */

    void *nvencoder;                     /* NVENC session handle */

    /* AVOption-backed settings */
    char *preset;
    int cbr;
    int twopass;
    int gobpattern;
    int gpu;
} NvencContext;
  135. static NvencData* data_queue_dequeue(NvencDataList* queue)
  136. {
  137. uint32_t mask;
  138. uint32_t read_pos;
  139. av_assert0(queue);
  140. av_assert0(queue->size);
  141. av_assert0(queue->data);
  142. if (!queue->count)
  143. return NULL;
  144. /* Size always is a multiple of two */
  145. mask = queue->size - 1;
  146. read_pos = (queue->pos - queue->count) & mask;
  147. queue->count--;
  148. return &queue->data[read_pos];
  149. }
  150. static int data_queue_enqueue(NvencDataList* queue, NvencData *data)
  151. {
  152. NvencDataList new_queue;
  153. NvencData* tmp_data;
  154. uint32_t mask;
  155. if (!queue->size) {
  156. /* size always has to be a multiple of two */
  157. queue->size = 4;
  158. queue->pos = 0;
  159. queue->count = 0;
  160. queue->data = av_malloc(queue->size * sizeof(*(queue->data)));
  161. if (!queue->data) {
  162. queue->size = 0;
  163. return AVERROR(ENOMEM);
  164. }
  165. }
  166. if (queue->count == queue->size) {
  167. new_queue.size = queue->size << 1;
  168. new_queue.pos = 0;
  169. new_queue.count = 0;
  170. new_queue.data = av_malloc(new_queue.size * sizeof(*(queue->data)));
  171. if (!new_queue.data)
  172. return AVERROR(ENOMEM);
  173. while (tmp_data = data_queue_dequeue(queue))
  174. data_queue_enqueue(&new_queue, tmp_data);
  175. av_free(queue->data);
  176. *queue = new_queue;
  177. }
  178. mask = queue->size - 1;
  179. queue->data[queue->pos] = *data;
  180. queue->pos = (queue->pos + 1) & mask;
  181. queue->count++;
  182. return 0;
  183. }
  184. static int out_surf_queue_enqueue(NvencDataList* queue, NvencOutputSurface* surface)
  185. {
  186. NvencData data;
  187. data.surface = surface;
  188. return data_queue_enqueue(queue, &data);
  189. }
  190. static NvencOutputSurface* out_surf_queue_dequeue(NvencDataList* queue)
  191. {
  192. NvencData* res = data_queue_dequeue(queue);
  193. if (!res)
  194. return NULL;
  195. return res->surface;
  196. }
  197. static int timestamp_queue_enqueue(NvencDataList* queue, int64_t timestamp)
  198. {
  199. NvencData data;
  200. data.timestamp = timestamp;
  201. return data_queue_enqueue(queue, &data);
  202. }
  203. static int64_t timestamp_queue_dequeue(NvencDataList* queue)
  204. {
  205. NvencData* res = data_queue_dequeue(queue);
  206. if (!res)
  207. return AV_NOPTS_VALUE;
  208. return res->timestamp;
  209. }
/* Resolve symbol s from the CUDA library into function pointer f (of type t);
 * on failure log fatally and jump to the caller's "error" label. */
#define CHECK_LOAD_FUNC(t, f, s) \
do { \
    (f) = (t)LOAD_FUNC(dl_fn->cuda_lib, s); \
    if (!(f)) { \
        av_log(avctx, AV_LOG_FATAL, "Failed loading %s from CUDA library\n", s); \
        goto error; \
    } \
} while (0)
/**
 * Dynamically load the CUDA driver library and resolve the entry points
 * this encoder needs.
 *
 * Returns 1 on success (or if already loaded), 0 on failure; on failure
 * the library handle is closed and cleared again.
 */
static av_cold int nvenc_dyload_cuda(AVCodecContext *avctx)
{
    NvencContext *ctx = avctx->priv_data;
    NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;

    /* already loaded by a previous call */
    if (dl_fn->cuda_lib)
        return 1;

#if defined(_WIN32)
    dl_fn->cuda_lib = LoadLibrary(TEXT("nvcuda.dll"));
#else
    dl_fn->cuda_lib = dlopen("libcuda.so", RTLD_LAZY);
#endif

    if (!dl_fn->cuda_lib) {
        av_log(avctx, AV_LOG_FATAL, "Failed loading CUDA library\n");
        goto error;
    }

    /* the "_v2" names select the current (non-legacy) driver API
     * variants of the context functions */
    CHECK_LOAD_FUNC(PCUINIT, dl_fn->cu_init, "cuInit");
    CHECK_LOAD_FUNC(PCUDEVICEGETCOUNT, dl_fn->cu_device_get_count, "cuDeviceGetCount");
    CHECK_LOAD_FUNC(PCUDEVICEGET, dl_fn->cu_device_get, "cuDeviceGet");
    CHECK_LOAD_FUNC(PCUDEVICEGETNAME, dl_fn->cu_device_get_name, "cuDeviceGetName");
    CHECK_LOAD_FUNC(PCUDEVICECOMPUTECAPABILITY, dl_fn->cu_device_compute_capability, "cuDeviceComputeCapability");
    CHECK_LOAD_FUNC(PCUCTXCREATE, dl_fn->cu_ctx_create, "cuCtxCreate_v2");
    CHECK_LOAD_FUNC(PCUCTXPOPCURRENT, dl_fn->cu_ctx_pop_current, "cuCtxPopCurrent_v2");
    CHECK_LOAD_FUNC(PCUCTXDESTROY, dl_fn->cu_ctx_destroy, "cuCtxDestroy_v2");

    return 1;

error:
    if (dl_fn->cuda_lib)
        DL_CLOSE_FUNC(dl_fn->cuda_lib);

    dl_fn->cuda_lib = NULL;

    return 0;
}
  248. static av_cold int check_cuda_errors(AVCodecContext *avctx, CUresult err, const char *func)
  249. {
  250. if (err != CUDA_SUCCESS) {
  251. av_log(avctx, AV_LOG_FATAL, ">> %s - failed with error code 0x%x\n", func, err);
  252. return 0;
  253. }
  254. return 1;
  255. }
  256. #define check_cuda_errors(f) if (!check_cuda_errors(avctx, f, #f)) goto error
/**
 * Enumerate CUDA devices and record those capable of NVENC.
 *
 * A device is considered NVENC-capable when its compute capability is at
 * least SM 3.0 (Kepler). Results are cached in dl_fn->nvenc_devices /
 * nvenc_device_count. Returns 1 on success, 0 on failure.
 */
static av_cold int nvenc_check_cuda(AVCodecContext *avctx)
{
    int device_count = 0;
    CUdevice cu_device = 0;
    char gpu_name[128];
    int smminor = 0, smmajor = 0;
    int i, smver;

    NvencContext *ctx = avctx->priv_data;
    NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;

    if (!nvenc_dyload_cuda(avctx))
        return 0;

    /* device list already built by a previous call */
    if (dl_fn->nvenc_device_count > 0)
        return 1;

    check_cuda_errors(dl_fn->cu_init(0));

    check_cuda_errors(dl_fn->cu_device_get_count(&device_count));

    if (!device_count) {
        av_log(avctx, AV_LOG_FATAL, "No CUDA capable devices found\n");
        goto error;
    }

    av_log(avctx, AV_LOG_VERBOSE, "%d CUDA capable devices found\n", device_count);

    dl_fn->nvenc_device_count = 0;

    for (i = 0; i < device_count; ++i) {
        check_cuda_errors(dl_fn->cu_device_get(&cu_device, i));
        check_cuda_errors(dl_fn->cu_device_get_name(gpu_name, sizeof(gpu_name), cu_device));
        check_cuda_errors(dl_fn->cu_device_compute_capability(&smmajor, &smminor, cu_device));

        /* pack major.minor into one comparable value: 3.0 -> 0x30 */
        smver = (smmajor << 4) | smminor;

        av_log(avctx, AV_LOG_VERBOSE, "[ GPU #%d - < %s > has Compute SM %d.%d, NVENC %s ]\n", i, gpu_name, smmajor, smminor, (smver >= 0x30) ? "Available" : "Not Available");

        if (smver >= 0x30)
            dl_fn->nvenc_devices[dl_fn->nvenc_device_count++] = cu_device;
    }

    if (!dl_fn->nvenc_device_count) {
        av_log(avctx, AV_LOG_FATAL, "No NVENC capable devices found\n");
        goto error;
    }

    return 1;

error:
    dl_fn->nvenc_device_count = 0;

    return 0;
}
/**
 * Dynamically load the NVENC library and populate the NVENC API function
 * table via NvEncodeAPICreateInstance.
 *
 * Requires a CUDA-capable setup (nvenc_check_cuda). Returns 1 on success
 * (or if already loaded), 0 on failure; on failure the library handle is
 * closed and cleared again.
 */
static av_cold int nvenc_dyload_nvenc(AVCodecContext *avctx)
{
    PNVENCODEAPICREATEINSTANCE nvEncodeAPICreateInstance = 0;
    NVENCSTATUS nvstatus;

    NvencContext *ctx = avctx->priv_data;
    NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;

    if (!nvenc_check_cuda(avctx))
        return 0;

    if (dl_fn->nvenc_lib)
        return 1;

#if defined(_WIN32)
    /* the DLL name differs between 32 and 64 bit Windows */
    if (sizeof(void*) == 8) {
        dl_fn->nvenc_lib = LoadLibrary(TEXT("nvEncodeAPI64.dll"));
    } else {
        dl_fn->nvenc_lib = LoadLibrary(TEXT("nvEncodeAPI.dll"));
    }
#else
    dl_fn->nvenc_lib = dlopen("libnvidia-encode.so.1", RTLD_LAZY);
#endif

    if (!dl_fn->nvenc_lib) {
        av_log(avctx, AV_LOG_FATAL, "Failed loading the nvenc library\n");
        goto error;
    }

    nvEncodeAPICreateInstance = (PNVENCODEAPICREATEINSTANCE)LOAD_FUNC(dl_fn->nvenc_lib, "NvEncodeAPICreateInstance");

    if (!nvEncodeAPICreateInstance) {
        av_log(avctx, AV_LOG_FATAL, "Failed to load nvenc entrypoint\n");
        goto error;
    }

    dl_fn->nvenc_funcs.version = NV_ENCODE_API_FUNCTION_LIST_VER;

    nvstatus = nvEncodeAPICreateInstance(&dl_fn->nvenc_funcs);

    if (nvstatus != NV_ENC_SUCCESS) {
        av_log(avctx, AV_LOG_FATAL, "Failed to create nvenc instance\n");
        goto error;
    }

    av_log(avctx, AV_LOG_VERBOSE, "Nvenc initialized successfully\n");

    return 1;

error:
    if (dl_fn->nvenc_lib)
        DL_CLOSE_FUNC(dl_fn->nvenc_lib);

    dl_fn->nvenc_lib = NULL;

    return 0;
}
/**
 * Unload the NVENC and CUDA libraries and clear every dynamically
 * resolved entry point, returning the dynload state to "not loaded".
 */
static av_cold void nvenc_unload_nvenc(AVCodecContext *avctx)
{
    NvencContext *ctx = avctx->priv_data;
    NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;

    DL_CLOSE_FUNC(dl_fn->nvenc_lib);
    dl_fn->nvenc_lib = NULL;

    dl_fn->nvenc_device_count = 0;

    DL_CLOSE_FUNC(dl_fn->cuda_lib);
    dl_fn->cuda_lib = NULL;

    /* invalidate the resolved CUDA entry points */
    dl_fn->cu_init = NULL;
    dl_fn->cu_device_get_count = NULL;
    dl_fn->cu_device_get = NULL;
    dl_fn->cu_device_get_name = NULL;
    dl_fn->cu_device_compute_capability = NULL;
    dl_fn->cu_ctx_create = NULL;
    dl_fn->cu_ctx_pop_current = NULL;
    dl_fn->cu_ctx_destroy = NULL;

    av_log(avctx, AV_LOG_VERBOSE, "Nvenc unloaded\n");
}
  357. static av_cold int nvenc_encode_init(AVCodecContext *avctx)
  358. {
  359. NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS encode_session_params = { 0 };
  360. NV_ENC_PRESET_CONFIG preset_config = { 0 };
  361. CUcontext cu_context_curr;
  362. CUresult cu_res;
  363. GUID encoder_preset = NV_ENC_PRESET_HQ_GUID;
  364. NVENCSTATUS nv_status = NV_ENC_SUCCESS;
  365. int surfaceCount = 0;
  366. int i, num_mbs;
  367. int isLL = 0;
  368. int res = 0;
  369. #if NVENCAPI_MAJOR_VERSION < 5
  370. GUID license = dummy_license;
  371. #endif
  372. NvencContext *ctx = avctx->priv_data;
  373. NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
  374. NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;
  375. if (!nvenc_dyload_nvenc(avctx))
  376. return AVERROR_EXTERNAL;
  377. avctx->coded_frame = av_frame_alloc();
  378. if (!avctx->coded_frame) {
  379. res = AVERROR(ENOMEM);
  380. goto error;
  381. }
  382. ctx->last_dts = AV_NOPTS_VALUE;
  383. ctx->encode_config.version = NV_ENC_CONFIG_VER;
  384. ctx->init_encode_params.version = NV_ENC_INITIALIZE_PARAMS_VER;
  385. preset_config.version = NV_ENC_PRESET_CONFIG_VER;
  386. preset_config.presetCfg.version = NV_ENC_CONFIG_VER;
  387. encode_session_params.version = NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER;
  388. encode_session_params.apiVersion = NVENCAPI_VERSION;
  389. #if NVENCAPI_MAJOR_VERSION < 5
  390. encode_session_params.clientKeyPtr = &license;
  391. #endif
  392. if (ctx->gpu >= dl_fn->nvenc_device_count) {
  393. av_log(avctx, AV_LOG_FATAL, "Requested GPU %d, but only %d GPUs are available!\n", ctx->gpu, dl_fn->nvenc_device_count);
  394. res = AVERROR(EINVAL);
  395. goto error;
  396. }
  397. ctx->cu_context = NULL;
  398. cu_res = dl_fn->cu_ctx_create(&ctx->cu_context, 0, dl_fn->nvenc_devices[ctx->gpu]);
  399. if (cu_res != CUDA_SUCCESS) {
  400. av_log(avctx, AV_LOG_FATAL, "Failed creating CUDA context for NVENC: 0x%x\n", (int)cu_res);
  401. res = AVERROR_EXTERNAL;
  402. goto error;
  403. }
  404. cu_res = dl_fn->cu_ctx_pop_current(&cu_context_curr);
  405. if (cu_res != CUDA_SUCCESS) {
  406. av_log(avctx, AV_LOG_FATAL, "Failed popping CUDA context: 0x%x\n", (int)cu_res);
  407. res = AVERROR_EXTERNAL;
  408. goto error;
  409. }
  410. encode_session_params.device = ctx->cu_context;
  411. encode_session_params.deviceType = NV_ENC_DEVICE_TYPE_CUDA;
  412. nv_status = p_nvenc->nvEncOpenEncodeSessionEx(&encode_session_params, &ctx->nvencoder);
  413. if (nv_status != NV_ENC_SUCCESS) {
  414. ctx->nvencoder = NULL;
  415. av_log(avctx, AV_LOG_FATAL, "OpenEncodeSessionEx failed: 0x%x - invalid license key?\n", (int)nv_status);
  416. res = AVERROR_EXTERNAL;
  417. goto error;
  418. }
  419. if (ctx->preset) {
  420. if (!strcmp(ctx->preset, "hp")) {
  421. encoder_preset = NV_ENC_PRESET_HP_GUID;
  422. } else if (!strcmp(ctx->preset, "hq")) {
  423. encoder_preset = NV_ENC_PRESET_HQ_GUID;
  424. } else if (!strcmp(ctx->preset, "bd")) {
  425. encoder_preset = NV_ENC_PRESET_BD_GUID;
  426. } else if (!strcmp(ctx->preset, "ll")) {
  427. encoder_preset = NV_ENC_PRESET_LOW_LATENCY_DEFAULT_GUID;
  428. isLL = 1;
  429. } else if (!strcmp(ctx->preset, "llhp")) {
  430. encoder_preset = NV_ENC_PRESET_LOW_LATENCY_HP_GUID;
  431. isLL = 1;
  432. } else if (!strcmp(ctx->preset, "llhq")) {
  433. encoder_preset = NV_ENC_PRESET_LOW_LATENCY_HQ_GUID;
  434. isLL = 1;
  435. } else if (!strcmp(ctx->preset, "default")) {
  436. encoder_preset = NV_ENC_PRESET_DEFAULT_GUID;
  437. } else {
  438. av_log(avctx, AV_LOG_FATAL, "Preset \"%s\" is unknown! Supported presets: hp, hq, bd, ll, llhp, llhq, default\n", ctx->preset);
  439. res = AVERROR(EINVAL);
  440. goto error;
  441. }
  442. }
  443. nv_status = p_nvenc->nvEncGetEncodePresetConfig(ctx->nvencoder, NV_ENC_CODEC_H264_GUID, encoder_preset, &preset_config);
  444. if (nv_status != NV_ENC_SUCCESS) {
  445. av_log(avctx, AV_LOG_FATAL, "GetEncodePresetConfig failed: 0x%x\n", (int)nv_status);
  446. res = AVERROR_EXTERNAL;
  447. goto error;
  448. }
  449. ctx->init_encode_params.encodeGUID = NV_ENC_CODEC_H264_GUID;
  450. ctx->init_encode_params.encodeHeight = avctx->height;
  451. ctx->init_encode_params.encodeWidth = avctx->width;
  452. ctx->init_encode_params.darHeight = avctx->height;
  453. ctx->init_encode_params.darWidth = avctx->width;
  454. ctx->init_encode_params.frameRateNum = avctx->time_base.den;
  455. ctx->init_encode_params.frameRateDen = avctx->time_base.num * avctx->ticks_per_frame;
  456. num_mbs = ((avctx->width + 15) >> 4) * ((avctx->height + 15) >> 4);
  457. ctx->max_surface_count = (num_mbs >= 8160) ? 32 : 48;
  458. ctx->init_encode_params.enableEncodeAsync = 0;
  459. ctx->init_encode_params.enablePTD = 1;
  460. ctx->init_encode_params.presetGUID = encoder_preset;
  461. ctx->init_encode_params.encodeConfig = &ctx->encode_config;
  462. memcpy(&ctx->encode_config, &preset_config.presetCfg, sizeof(ctx->encode_config));
  463. ctx->encode_config.version = NV_ENC_CONFIG_VER;
  464. if (avctx->gop_size >= 0) {
  465. ctx->encode_config.gopLength = avctx->gop_size;
  466. ctx->encode_config.encodeCodecConfig.h264Config.idrPeriod = avctx->gop_size;
  467. }
  468. if (avctx->bit_rate > 0)
  469. ctx->encode_config.rcParams.averageBitRate = avctx->bit_rate;
  470. if (avctx->rc_max_rate > 0)
  471. ctx->encode_config.rcParams.maxBitRate = avctx->rc_max_rate;
  472. if (ctx->cbr) {
  473. if (!ctx->twopass) {
  474. ctx->encode_config.rcParams.rateControlMode = NV_ENC_PARAMS_RC_CBR;
  475. } else if (ctx->twopass == 1 || isLL) {
  476. ctx->encode_config.rcParams.rateControlMode = NV_ENC_PARAMS_RC_2_PASS_QUALITY;
  477. ctx->encode_config.encodeCodecConfig.h264Config.adaptiveTransformMode = NV_ENC_H264_ADAPTIVE_TRANSFORM_ENABLE;
  478. ctx->encode_config.encodeCodecConfig.h264Config.fmoMode = NV_ENC_H264_FMO_DISABLE;
  479. if (!isLL)
  480. av_log(avctx, AV_LOG_WARNING, "Twopass mode is only known to work with low latency (ll, llhq, llhp) presets.\n");
  481. } else {
  482. ctx->encode_config.rcParams.rateControlMode = NV_ENC_PARAMS_RC_CBR;
  483. }
  484. } else if (avctx->global_quality > 0) {
  485. ctx->encode_config.rcParams.rateControlMode = NV_ENC_PARAMS_RC_CONSTQP;
  486. ctx->encode_config.rcParams.constQP.qpInterB = avctx->global_quality;
  487. ctx->encode_config.rcParams.constQP.qpInterP = avctx->global_quality;
  488. ctx->encode_config.rcParams.constQP.qpIntra = avctx->global_quality;
  489. avctx->qmin = -1;
  490. avctx->qmax = -1;
  491. } else if (avctx->qmin >= 0 && avctx->qmax >= 0) {
  492. ctx->encode_config.rcParams.rateControlMode = NV_ENC_PARAMS_RC_VBR;
  493. ctx->encode_config.rcParams.enableMinQP = 1;
  494. ctx->encode_config.rcParams.enableMaxQP = 1;
  495. ctx->encode_config.rcParams.minQP.qpInterB = avctx->qmin;
  496. ctx->encode_config.rcParams.minQP.qpInterP = avctx->qmin;
  497. ctx->encode_config.rcParams.minQP.qpIntra = avctx->qmin;
  498. ctx->encode_config.rcParams.maxQP.qpInterB = avctx->qmax;
  499. ctx->encode_config.rcParams.maxQP.qpInterP = avctx->qmax;
  500. ctx->encode_config.rcParams.maxQP.qpIntra = avctx->qmax;
  501. }
  502. if (avctx->rc_buffer_size > 0)
  503. ctx->encode_config.rcParams.vbvBufferSize = avctx->rc_buffer_size;
  504. if (avctx->flags & CODEC_FLAG_INTERLACED_DCT) {
  505. ctx->encode_config.frameFieldMode = NV_ENC_PARAMS_FRAME_FIELD_MODE_FIELD;
  506. } else {
  507. ctx->encode_config.frameFieldMode = NV_ENC_PARAMS_FRAME_FIELD_MODE_FRAME;
  508. }
  509. switch (avctx->profile) {
  510. case FF_PROFILE_H264_BASELINE:
  511. ctx->encode_config.profileGUID = NV_ENC_H264_PROFILE_BASELINE_GUID;
  512. break;
  513. case FF_PROFILE_H264_MAIN:
  514. ctx->encode_config.profileGUID = NV_ENC_H264_PROFILE_MAIN_GUID;
  515. break;
  516. case FF_PROFILE_H264_HIGH:
  517. case FF_PROFILE_UNKNOWN:
  518. ctx->encode_config.profileGUID = NV_ENC_H264_PROFILE_HIGH_GUID;
  519. break;
  520. default:
  521. av_log(avctx, AV_LOG_WARNING, "Unsupported h264 profile requested, falling back to high\n");
  522. ctx->encode_config.profileGUID = NV_ENC_H264_PROFILE_HIGH_GUID;
  523. break;
  524. }
  525. if (ctx->gobpattern >= 0) {
  526. ctx->encode_config.frameIntervalP = ctx->gobpattern;
  527. }
  528. // when there're b frames, set dts offset
  529. if (ctx->encode_config.frameIntervalP >= 2) {
  530. ctx->last_dts = -2;
  531. }
  532. ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.colourDescriptionPresentFlag = 1;
  533. ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.videoSignalTypePresentFlag = 1;
  534. ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.colourMatrix = avctx->colorspace;
  535. ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.colourPrimaries = avctx->color_primaries;
  536. ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.transferCharacteristics = avctx->color_trc;
  537. ctx->encode_config.encodeCodecConfig.h264Config.h264VUIParameters.videoFullRangeFlag = avctx->color_range == AVCOL_RANGE_JPEG;
  538. ctx->encode_config.encodeCodecConfig.h264Config.disableSPSPPS = (avctx->flags & CODEC_FLAG_GLOBAL_HEADER) ? 1 : 0;
  539. ctx->encode_config.encodeCodecConfig.h264Config.repeatSPSPPS = (avctx->flags & CODEC_FLAG_GLOBAL_HEADER) ? 0 : 1;
  540. nv_status = p_nvenc->nvEncInitializeEncoder(ctx->nvencoder, &ctx->init_encode_params);
  541. if (nv_status != NV_ENC_SUCCESS) {
  542. av_log(avctx, AV_LOG_FATAL, "InitializeEncoder failed: 0x%x\n", (int)nv_status);
  543. res = AVERROR_EXTERNAL;
  544. goto error;
  545. }
  546. ctx->input_surfaces = av_malloc(ctx->max_surface_count * sizeof(*ctx->input_surfaces));
  547. if (!ctx->input_surfaces) {
  548. res = AVERROR(ENOMEM);
  549. goto error;
  550. }
  551. ctx->output_surfaces = av_malloc(ctx->max_surface_count * sizeof(*ctx->output_surfaces));
  552. if (!ctx->output_surfaces) {
  553. res = AVERROR(ENOMEM);
  554. goto error;
  555. }
  556. for (surfaceCount = 0; surfaceCount < ctx->max_surface_count; ++surfaceCount) {
  557. NV_ENC_CREATE_INPUT_BUFFER allocSurf = { 0 };
  558. NV_ENC_CREATE_BITSTREAM_BUFFER allocOut = { 0 };
  559. allocSurf.version = NV_ENC_CREATE_INPUT_BUFFER_VER;
  560. allocOut.version = NV_ENC_CREATE_BITSTREAM_BUFFER_VER;
  561. allocSurf.width = (avctx->width + 31) & ~31;
  562. allocSurf.height = (avctx->height + 31) & ~31;
  563. allocSurf.memoryHeap = NV_ENC_MEMORY_HEAP_SYSMEM_CACHED;
  564. switch (avctx->pix_fmt) {
  565. case AV_PIX_FMT_YUV420P:
  566. allocSurf.bufferFmt = NV_ENC_BUFFER_FORMAT_YV12_PL;
  567. break;
  568. case AV_PIX_FMT_NV12:
  569. allocSurf.bufferFmt = NV_ENC_BUFFER_FORMAT_NV12_PL;
  570. break;
  571. case AV_PIX_FMT_YUV444P:
  572. allocSurf.bufferFmt = NV_ENC_BUFFER_FORMAT_YUV444_PL;
  573. break;
  574. default:
  575. av_log(avctx, AV_LOG_FATAL, "Invalid input pixel format\n");
  576. res = AVERROR(EINVAL);
  577. goto error;
  578. }
  579. nv_status = p_nvenc->nvEncCreateInputBuffer(ctx->nvencoder, &allocSurf);
  580. if (nv_status = NV_ENC_SUCCESS){
  581. av_log(avctx, AV_LOG_FATAL, "CreateInputBuffer failed\n");
  582. res = AVERROR_EXTERNAL;
  583. goto error;
  584. }
  585. ctx->input_surfaces[surfaceCount].lockCount = 0;
  586. ctx->input_surfaces[surfaceCount].input_surface = allocSurf.inputBuffer;
  587. ctx->input_surfaces[surfaceCount].format = allocSurf.bufferFmt;
  588. ctx->input_surfaces[surfaceCount].width = allocSurf.width;
  589. ctx->input_surfaces[surfaceCount].height = allocSurf.height;
  590. /* 1MB is large enough to hold most output frames. NVENC increases this automaticaly if it's not enough. */
  591. allocOut.size = 1024 * 1024;
  592. allocOut.memoryHeap = NV_ENC_MEMORY_HEAP_SYSMEM_CACHED;
  593. nv_status = p_nvenc->nvEncCreateBitstreamBuffer(ctx->nvencoder, &allocOut);
  594. if (nv_status = NV_ENC_SUCCESS) {
  595. av_log(avctx, AV_LOG_FATAL, "CreateBitstreamBuffer failed\n");
  596. ctx->output_surfaces[surfaceCount++].output_surface = NULL;
  597. res = AVERROR_EXTERNAL;
  598. goto error;
  599. }
  600. ctx->output_surfaces[surfaceCount].output_surface = allocOut.bitstreamBuffer;
  601. ctx->output_surfaces[surfaceCount].size = allocOut.size;
  602. ctx->output_surfaces[surfaceCount].busy = 0;
  603. }
  604. if (avctx->flags & CODEC_FLAG_GLOBAL_HEADER) {
  605. uint32_t outSize = 0;
  606. char tmpHeader[256];
  607. NV_ENC_SEQUENCE_PARAM_PAYLOAD payload = { 0 };
  608. payload.version = NV_ENC_SEQUENCE_PARAM_PAYLOAD_VER;
  609. payload.spsppsBuffer = tmpHeader;
  610. payload.inBufferSize = sizeof(tmpHeader);
  611. payload.outSPSPPSPayloadSize = &outSize;
  612. nv_status = p_nvenc->nvEncGetSequenceParams(ctx->nvencoder, &payload);
  613. if (nv_status != NV_ENC_SUCCESS) {
  614. av_log(avctx, AV_LOG_FATAL, "GetSequenceParams failed\n");
  615. goto error;
  616. }
  617. avctx->extradata_size = outSize;
  618. avctx->extradata = av_mallocz(outSize + FF_INPUT_BUFFER_PADDING_SIZE);
  619. if (!avctx->extradata) {
  620. res = AVERROR(ENOMEM);
  621. goto error;
  622. }
  623. memcpy(avctx->extradata, tmpHeader, outSize);
  624. }
  625. if (ctx->encode_config.frameIntervalP > 1)
  626. avctx->has_b_frames = 2;
  627. if (ctx->encode_config.rcParams.averageBitRate > 0)
  628. avctx->bit_rate = ctx->encode_config.rcParams.averageBitRate;
  629. return 0;
  630. error:
  631. for (i = 0; i < surfaceCount; ++i) {
  632. p_nvenc->nvEncDestroyInputBuffer(ctx->nvencoder, ctx->input_surfaces[i].input_surface);
  633. if (ctx->output_surfaces[i].output_surface)
  634. p_nvenc->nvEncDestroyBitstreamBuffer(ctx->nvencoder, ctx->output_surfaces[i].output_surface);
  635. }
  636. if (ctx->nvencoder)
  637. p_nvenc->nvEncDestroyEncoder(ctx->nvencoder);
  638. if (ctx->cu_context)
  639. dl_fn->cu_ctx_destroy(ctx->cu_context);
  640. av_frame_free(&avctx->coded_frame);
  641. nvenc_unload_nvenc(avctx);
  642. ctx->nvencoder = NULL;
  643. ctx->cu_context = NULL;
  644. return res;
  645. }
/**
 * Tear down the encoder: free the bookkeeping queues, destroy every
 * input/output buffer, close the NVENC session, destroy the CUDA context
 * and unload the dynamically loaded libraries. Always returns 0.
 */
static av_cold int nvenc_encode_close(AVCodecContext *avctx)
{
    NvencContext *ctx = avctx->priv_data;
    NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
    NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;
    int i;

    av_freep(&ctx->timestamp_list.data);
    av_freep(&ctx->output_surface_ready_queue.data);
    av_freep(&ctx->output_surface_queue.data);

    for (i = 0; i < ctx->max_surface_count; ++i) {
        p_nvenc->nvEncDestroyInputBuffer(ctx->nvencoder, ctx->input_surfaces[i].input_surface);
        p_nvenc->nvEncDestroyBitstreamBuffer(ctx->nvencoder, ctx->output_surfaces[i].output_surface);
    }
    ctx->max_surface_count = 0;

    p_nvenc->nvEncDestroyEncoder(ctx->nvencoder);
    ctx->nvencoder = NULL;

    dl_fn->cu_ctx_destroy(ctx->cu_context);
    ctx->cu_context = NULL;

    nvenc_unload_nvenc(avctx);

    av_frame_free(&avctx->coded_frame);

    return 0;
}
  668. static int process_output_surface(AVCodecContext *avctx, AVPacket *pkt, AVFrame *coded_frame, NvencOutputSurface *tmpoutsurf)
  669. {
  670. NvencContext *ctx = avctx->priv_data;
  671. NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
  672. NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;
  673. uint32_t *slice_offsets = av_mallocz(ctx->encode_config.encodeCodecConfig.h264Config.sliceModeData * sizeof(*slice_offsets));
  674. NV_ENC_LOCK_BITSTREAM lock_params = { 0 };
  675. NVENCSTATUS nv_status;
  676. int res = 0;
  677. if (!slice_offsets)
  678. return AVERROR(ENOMEM);
  679. lock_params.version = NV_ENC_LOCK_BITSTREAM_VER;
  680. lock_params.doNotWait = 0;
  681. lock_params.outputBitstream = tmpoutsurf->output_surface;
  682. lock_params.sliceOffsets = slice_offsets;
  683. nv_status = p_nvenc->nvEncLockBitstream(ctx->nvencoder, &lock_params);
  684. if (nv_status != NV_ENC_SUCCESS) {
  685. av_log(avctx, AV_LOG_ERROR, "Failed locking bitstream buffer\n");
  686. res = AVERROR_EXTERNAL;
  687. goto error;
  688. }
  689. if (res = ff_alloc_packet2(avctx, pkt, lock_params.bitstreamSizeInBytes)) {
  690. p_nvenc->nvEncUnlockBitstream(ctx->nvencoder, tmpoutsurf->output_surface);
  691. goto error;
  692. }
  693. memcpy(pkt->data, lock_params.bitstreamBufferPtr, lock_params.bitstreamSizeInBytes);
  694. nv_status = p_nvenc->nvEncUnlockBitstream(ctx->nvencoder, tmpoutsurf->output_surface);
  695. if (nv_status != NV_ENC_SUCCESS)
  696. av_log(avctx, AV_LOG_ERROR, "Failed unlocking bitstream buffer, expect the gates of mordor to open\n");
  697. switch (lock_params.pictureType) {
  698. case NV_ENC_PIC_TYPE_IDR:
  699. pkt->flags |= AV_PKT_FLAG_KEY;
  700. case NV_ENC_PIC_TYPE_I:
  701. avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
  702. break;
  703. case NV_ENC_PIC_TYPE_P:
  704. avctx->coded_frame->pict_type = AV_PICTURE_TYPE_P;
  705. break;
  706. case NV_ENC_PIC_TYPE_B:
  707. avctx->coded_frame->pict_type = AV_PICTURE_TYPE_B;
  708. break;
  709. case NV_ENC_PIC_TYPE_BI:
  710. avctx->coded_frame->pict_type = AV_PICTURE_TYPE_BI;
  711. break;
  712. default:
  713. av_log(avctx, AV_LOG_ERROR, "Unknown picture type encountered, expect the output to be broken.\n");
  714. av_log(avctx, AV_LOG_ERROR, "Please report this error and include as much information on how to reproduce it as possible.\n");
  715. res = AVERROR_EXTERNAL;
  716. goto error;
  717. }
  718. pkt->pts = lock_params.outputTimeStamp;
  719. pkt->dts = timestamp_queue_dequeue(&ctx->timestamp_list);
  720. // when there're b frame(s), set dts offset
  721. if (ctx->encode_config.frameIntervalP >= 2)
  722. pkt->dts -= 1;
  723. if (pkt->dts > pkt->pts)
  724. pkt->dts = pkt->pts;
  725. if (ctx->last_dts != AV_NOPTS_VALUE && pkt->dts <= ctx->last_dts)
  726. pkt->dts = ctx->last_dts + 1;
  727. ctx->last_dts = pkt->dts;
  728. av_free(slice_offsets);
  729. return 0;
  730. error:
  731. av_free(slice_offsets);
  732. timestamp_queue_dequeue(&ctx->timestamp_list);
  733. return res;
  734. }
  735. static int nvenc_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
  736. const AVFrame *frame, int *got_packet)
  737. {
  738. NVENCSTATUS nv_status;
  739. NvencOutputSurface *tmpoutsurf;
  740. int res, i = 0;
  741. NvencContext *ctx = avctx->priv_data;
  742. NvencDynLoadFunctions *dl_fn = &ctx->nvenc_dload_funcs;
  743. NV_ENCODE_API_FUNCTION_LIST *p_nvenc = &dl_fn->nvenc_funcs;
  744. NV_ENC_PIC_PARAMS pic_params = { 0 };
  745. pic_params.version = NV_ENC_PIC_PARAMS_VER;
  746. if (frame) {
  747. NV_ENC_LOCK_INPUT_BUFFER lockBufferParams = { 0 };
  748. NvencInputSurface *inSurf = NULL;
  749. for (i = 0; i < ctx->max_surface_count; ++i) {
  750. if (!ctx->input_surfaces[i].lockCount) {
  751. inSurf = &ctx->input_surfaces[i];
  752. break;
  753. }
  754. }
  755. av_assert0(inSurf);
  756. inSurf->lockCount = 1;
  757. lockBufferParams.version = NV_ENC_LOCK_INPUT_BUFFER_VER;
  758. lockBufferParams.inputBuffer = inSurf->input_surface;
  759. nv_status = p_nvenc->nvEncLockInputBuffer(ctx->nvencoder, &lockBufferParams);
  760. if (nv_status != NV_ENC_SUCCESS) {
  761. av_log(avctx, AV_LOG_ERROR, "Failed locking nvenc input buffer\n");
  762. return 0;
  763. }
  764. if (avctx->pix_fmt == AV_PIX_FMT_YUV420P) {
  765. uint8_t *buf = lockBufferParams.bufferDataPtr;
  766. av_image_copy_plane(buf, lockBufferParams.pitch,
  767. frame->data[0], frame->linesize[0],
  768. avctx->width, avctx->height);
  769. buf += inSurf->height * lockBufferParams.pitch;
  770. av_image_copy_plane(buf, lockBufferParams.pitch >> 1,
  771. frame->data[2], frame->linesize[2],
  772. avctx->width >> 1, avctx->height >> 1);
  773. buf += (inSurf->height * lockBufferParams.pitch) >> 2;
  774. av_image_copy_plane(buf, lockBufferParams.pitch >> 1,
  775. frame->data[1], frame->linesize[1],
  776. avctx->width >> 1, avctx->height >> 1);
  777. } else if (avctx->pix_fmt == AV_PIX_FMT_NV12) {
  778. uint8_t *buf = lockBufferParams.bufferDataPtr;
  779. av_image_copy_plane(buf, lockBufferParams.pitch,
  780. frame->data[0], frame->linesize[0],
  781. avctx->width, avctx->height);
  782. buf += inSurf->height * lockBufferParams.pitch;
  783. av_image_copy_plane(buf, lockBufferParams.pitch,
  784. frame->data[1], frame->linesize[1],
  785. avctx->width, avctx->height >> 1);
  786. } else if (avctx->pix_fmt == AV_PIX_FMT_YUV444P) {
  787. uint8_t *buf = lockBufferParams.bufferDataPtr;
  788. av_image_copy_plane(buf, lockBufferParams.pitch,
  789. frame->data[0], frame->linesize[0],
  790. avctx->width, avctx->height);
  791. buf += inSurf->height * lockBufferParams.pitch;
  792. av_image_copy_plane(buf, lockBufferParams.pitch,
  793. frame->data[1], frame->linesize[1],
  794. avctx->width, avctx->height);
  795. buf += inSurf->height * lockBufferParams.pitch;
  796. av_image_copy_plane(buf, lockBufferParams.pitch,
  797. frame->data[2], frame->linesize[2],
  798. avctx->width, avctx->height);
  799. } else {
  800. av_log(avctx, AV_LOG_FATAL, "Invalid pixel format!\n");
  801. return AVERROR(EINVAL);
  802. }
  803. nv_status = p_nvenc->nvEncUnlockInputBuffer(ctx->nvencoder, inSurf->input_surface);
  804. if (nv_status != NV_ENC_SUCCESS) {
  805. av_log(avctx, AV_LOG_FATAL, "Failed unlocking input buffer!\n");
  806. return AVERROR_EXTERNAL;
  807. }
  808. for (i = 0; i < ctx->max_surface_count; ++i)
  809. if (!ctx->output_surfaces[i].busy)
  810. break;
  811. if (i == ctx->max_surface_count) {
  812. inSurf->lockCount = 0;
  813. av_log(avctx, AV_LOG_FATAL, "No free output surface found!\n");
  814. return AVERROR_EXTERNAL;
  815. }
  816. ctx->output_surfaces[i].input_surface = inSurf;
  817. pic_params.inputBuffer = inSurf->input_surface;
  818. pic_params.bufferFmt = inSurf->format;
  819. pic_params.inputWidth = avctx->width;
  820. pic_params.inputHeight = avctx->height;
  821. pic_params.outputBitstream = ctx->output_surfaces[i].output_surface;
  822. pic_params.completionEvent = 0;
  823. if (avctx->flags & CODEC_FLAG_INTERLACED_DCT) {
  824. if (frame->top_field_first) {
  825. pic_params.pictureStruct = NV_ENC_PIC_STRUCT_FIELD_TOP_BOTTOM;
  826. } else {
  827. pic_params.pictureStruct = NV_ENC_PIC_STRUCT_FIELD_BOTTOM_TOP;
  828. }
  829. } else {
  830. pic_params.pictureStruct = NV_ENC_PIC_STRUCT_FRAME;
  831. }
  832. pic_params.encodePicFlags = 0;
  833. pic_params.inputTimeStamp = frame->pts;
  834. pic_params.inputDuration = 0;
  835. pic_params.codecPicParams.h264PicParams.sliceMode = ctx->encode_config.encodeCodecConfig.h264Config.sliceMode;
  836. pic_params.codecPicParams.h264PicParams.sliceModeData = ctx->encode_config.encodeCodecConfig.h264Config.sliceModeData;
  837. #if NVENCAPI_MAJOR_VERSION < 5
  838. memcpy(&pic_params.rcParams, &ctx->encode_config.rcParams, sizeof(NV_ENC_RC_PARAMS));
  839. #endif
  840. res = timestamp_queue_enqueue(&ctx->timestamp_list, frame->pts);
  841. if (res)
  842. return res;
  843. } else {
  844. pic_params.encodePicFlags = NV_ENC_PIC_FLAG_EOS;
  845. }
  846. nv_status = p_nvenc->nvEncEncodePicture(ctx->nvencoder, &pic_params);
  847. if (frame && nv_status == NV_ENC_ERR_NEED_MORE_INPUT) {
  848. res = out_surf_queue_enqueue(&ctx->output_surface_queue, &ctx->output_surfaces[i]);
  849. if (res)
  850. return res;
  851. ctx->output_surfaces[i].busy = 1;
  852. }
  853. if (nv_status != NV_ENC_SUCCESS && nv_status != NV_ENC_ERR_NEED_MORE_INPUT) {
  854. av_log(avctx, AV_LOG_ERROR, "EncodePicture failed!\n");
  855. return AVERROR_EXTERNAL;
  856. }
  857. if (nv_status != NV_ENC_ERR_NEED_MORE_INPUT) {
  858. while (ctx->output_surface_queue.count) {
  859. tmpoutsurf = out_surf_queue_dequeue(&ctx->output_surface_queue);
  860. res = out_surf_queue_enqueue(&ctx->output_surface_ready_queue, tmpoutsurf);
  861. if (res)
  862. return res;
  863. }
  864. if (frame) {
  865. res = out_surf_queue_enqueue(&ctx->output_surface_ready_queue, &ctx->output_surfaces[i]);
  866. if (res)
  867. return res;
  868. ctx->output_surfaces[i].busy = 1;
  869. }
  870. }
  871. if (ctx->output_surface_ready_queue.count) {
  872. tmpoutsurf = out_surf_queue_dequeue(&ctx->output_surface_ready_queue);
  873. res = process_output_surface(avctx, pkt, avctx->coded_frame, tmpoutsurf);
  874. if (res)
  875. return res;
  876. tmpoutsurf->busy = 0;
  877. av_assert0(tmpoutsurf->input_surface->lockCount);
  878. tmpoutsurf->input_surface->lockCount--;
  879. *got_packet = 1;
  880. } else {
  881. *got_packet = 0;
  882. }
  883. return 0;
  884. }
/* Pixel formats advertised to libavcodec users. Only NV12 is exposed,
 * even though the upload path in nvenc_encode_frame() can also copy
 * YUV420P and YUV444P frames — presumably those require surface formats
 * not enabled at init; TODO confirm before extending this list. */
static enum AVPixelFormat pix_fmts_nvenc[] = {
    AV_PIX_FMT_NV12,
    AV_PIX_FMT_NONE
};
/* Shorthand for AVOption tables: field offset into the private context,
 * and the flag set common to all of this encoder's options. */
#define OFFSET(x) offsetof(NvencContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
/* Private encoder options. A default of -1 means "unset, let the encoder
 * pick". Note: the "goppattern" option stores into the NvencContext field
 * spelled 'gobpattern' (existing field name, kept as-is). */
static const AVOption options[] = {
    { "preset", "Set the encoding preset (one of hq, hp, bd, ll, llhq, llhp, default)", OFFSET(preset), AV_OPT_TYPE_STRING, { .str = "hq" }, 0, 0, VE },
    { "cbr", "Use cbr encoding mode", OFFSET(cbr), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
    { "2pass", "Use 2pass cbr encoding mode (low latency mode only)", OFFSET(twopass), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 1, VE },
    { "goppattern", "Specifies the GOP pattern as follows: 0: I, 1: IPP, 2: IBP, 3: IBBP", OFFSET(gobpattern), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 3, VE },
    { "gpu", "Selects which NVENC capable GPU to use. First GPU is 0, second is 1, and so on.", OFFSET(gpu), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, VE },
    { NULL }
};
/* AVClass binding the option table above to the private context, so the
 * generic AVOption machinery (and -h full listings) can see it. */
static const AVClass nvenc_class = {
    .class_name = "nvenc",
    .item_name = av_default_item_name,
    .option = options,
    .version = LIBAVUTIL_VERSION_INT,
};
/* Overrides for generic codec defaults: bitrate 0 and the quantizer
 * options at -1 mean "not set by the user", letting init derive its own
 * rate-control settings instead of inheriting lavc's generic defaults. */
static const AVCodecDefault nvenc_defaults[] = {
    { "b", "0" },
    { "qmin", "-1" },
    { "qmax", "-1" },
    { "qdiff", "-1" },
    { "qblur", "-1" },
    { "qcomp", "-1" },
    { NULL },
};
/* Encoder registration entry. CODEC_CAP_DELAY is required because frames
 * are buffered inside NVENC (B-frame reordering) and drained by calling
 * nvenc_encode_frame() with a NULL frame at EOS. */
AVCodec ff_nvenc_encoder = {
    .name = "nvenc",
    .long_name = NULL_IF_CONFIG_SMALL("Nvidia NVENC h264 encoder"),
    .type = AVMEDIA_TYPE_VIDEO,
    .id = AV_CODEC_ID_H264,
    .priv_data_size = sizeof(NvencContext),
    .init = nvenc_encode_init,
    .encode2 = nvenc_encode_frame,
    .close = nvenc_encode_close,
    .capabilities = CODEC_CAP_DELAY,
    .priv_class = &nvenc_class,
    .defaults = nvenc_defaults,
    .pix_fmts = pix_fmts_nvenc,
};