/*
 * Blackmagic DeckLink output
 * Copyright (c) 2013-2014 Ramiro Polla
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <atomic>
using std::atomic;

/* Include internal.h first to avoid conflict between winsock.h (used by
 * DeckLink headers) and winsock2.h (used by libavformat) in MSVC++ builds */
extern "C" {
#include "libavformat/internal.h"
}

#include <DeckLinkAPI.h>

extern "C" {
#include "libavformat/avformat.h"
#include "libavutil/imgutils.h"
#include "avdevice.h"
}

#include "decklink_common.h"
#include "decklink_enc.h"

#if CONFIG_LIBKLVANC
#include "libklvanc/vanc.h"
#include "libklvanc/vanc-lines.h"
#include "libklvanc/pixels.h"
#endif

/* DeckLink callback class declaration */
class decklink_frame : public IDeckLinkVideoFrame
{
public:
    decklink_frame(struct decklink_ctx *ctx, AVFrame *avframe, AVCodecID codec_id, int height, int width) :
        _ctx(ctx), _avframe(avframe), _avpacket(NULL), _codec_id(codec_id), _ancillary(NULL), _height(height), _width(width), _refs(1) { }
    decklink_frame(struct decklink_ctx *ctx, AVPacket *avpacket, AVCodecID codec_id, int height, int width) :
        _ctx(ctx), _avframe(NULL), _avpacket(avpacket), _codec_id(codec_id), _ancillary(NULL), _height(height), _width(width), _refs(1) { }
    virtual long STDMETHODCALLTYPE GetWidth(void)  { return _width; }
    virtual long STDMETHODCALLTYPE GetHeight(void) { return _height; }
    virtual long STDMETHODCALLTYPE GetRowBytes(void)
    {
        if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
            return _avframe->linesize[0] < 0 ? -_avframe->linesize[0] : _avframe->linesize[0];
        else
            /* V210 packs 48 pixels into every 128-byte group, so round the
             * width up to a multiple of 48 pixels. */
            return ((GetWidth() + 47) / 48) * 128;
    }
    virtual BMDPixelFormat STDMETHODCALLTYPE GetPixelFormat(void)
    {
        if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
            return bmdFormat8BitYUV;
        else
            return bmdFormat10BitYUV;
    }
    virtual BMDFrameFlags STDMETHODCALLTYPE GetFlags(void)
    {
        if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
            return _avframe->linesize[0] < 0 ? bmdFrameFlagFlipVertical : bmdFrameFlagDefault;
        else
            return bmdFrameFlagDefault;
    }
    virtual HRESULT STDMETHODCALLTYPE GetBytes(void **buffer)
    {
        if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
            if (_avframe->linesize[0] < 0)
                *buffer = (void *)(_avframe->data[0] + _avframe->linesize[0] * (_avframe->height - 1));
            else
                *buffer = (void *)(_avframe->data[0]);
        } else {
            *buffer = (void *)(_avpacket->data);
        }
        return S_OK;
    }

    virtual HRESULT STDMETHODCALLTYPE GetTimecode(BMDTimecodeFormat format, IDeckLinkTimecode **timecode) { return S_FALSE; }
    virtual HRESULT STDMETHODCALLTYPE GetAncillaryData(IDeckLinkVideoFrameAncillary **ancillary)
    {
        *ancillary = _ancillary;
        if (_ancillary) {
            _ancillary->AddRef();
            return S_OK;
        } else {
            return S_FALSE;
        }
    }
    virtual HRESULT STDMETHODCALLTYPE SetAncillaryData(IDeckLinkVideoFrameAncillary *ancillary)
    {
        if (_ancillary)
            _ancillary->Release();
        _ancillary = ancillary;
        _ancillary->AddRef();
        return S_OK;
    }
    virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID *ppv) { return E_NOINTERFACE; }
    virtual ULONG STDMETHODCALLTYPE AddRef(void) { return ++_refs; }
    virtual ULONG STDMETHODCALLTYPE Release(void)
    {
        int ret = --_refs;
        if (!ret) {
            av_frame_free(&_avframe);
            av_packet_free(&_avpacket);
            if (_ancillary)
                _ancillary->Release();
            delete this;
        }
        return ret;
    }

    struct decklink_ctx *_ctx;
    AVFrame *_avframe;
    AVPacket *_avpacket;
    AVCodecID _codec_id;
    IDeckLinkVideoFrameAncillary *_ancillary;
    int _height;
    int _width;

private:
    std::atomic<int> _refs;
};

class decklink_output_callback : public IDeckLinkVideoOutputCallback
{
public:
    virtual HRESULT STDMETHODCALLTYPE ScheduledFrameCompleted(IDeckLinkVideoFrame *_frame, BMDOutputFrameCompletionResult result)
    {
        decklink_frame *frame = static_cast<decklink_frame *>(_frame);
        struct decklink_ctx *ctx = frame->_ctx;

        if (frame->_avframe)
            av_frame_unref(frame->_avframe);
        if (frame->_avpacket)
            av_packet_unref(frame->_avpacket);

        pthread_mutex_lock(&ctx->mutex);
        ctx->frames_buffer_available_spots++;
        pthread_cond_broadcast(&ctx->cond);
        pthread_mutex_unlock(&ctx->mutex);

        return S_OK;
    }
    virtual HRESULT STDMETHODCALLTYPE ScheduledPlaybackHasStopped(void)       { return S_OK; }
    virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID *ppv) { return E_NOINTERFACE; }
    virtual ULONG   STDMETHODCALLTYPE AddRef(void)                            { return 1; }
    virtual ULONG   STDMETHODCALLTYPE Release(void)                           { return 1; }
};

static int decklink_setup_video(AVFormatContext *avctx, AVStream *st)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    AVCodecParameters *c = st->codecpar;

    if (ctx->video) {
        av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n");
        return -1;
    }

    if (c->codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
        if (c->format != AV_PIX_FMT_UYVY422) {
            av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format!"
                   " Only AV_PIX_FMT_UYVY422 is supported.\n");
            return -1;
        }
        ctx->raw_format = bmdFormat8BitYUV;
    } else if (c->codec_id != AV_CODEC_ID_V210) {
        av_log(avctx, AV_LOG_ERROR, "Unsupported codec type!"
               " Only V210 and wrapped frame with AV_PIX_FMT_UYVY422 are supported.\n");
        return -1;
    } else {
        ctx->raw_format = bmdFormat10BitYUV;
    }

    if (ff_decklink_set_configs(avctx, DIRECTION_OUT) < 0) {
        av_log(avctx, AV_LOG_ERROR, "Could not set output configuration\n");
        return -1;
    }
    if (ff_decklink_set_format(avctx, c->width, c->height,
                               st->time_base.num, st->time_base.den, c->field_order)) {
        av_log(avctx, AV_LOG_ERROR, "Unsupported video size, framerate or field order!"
               " Check available formats with -list_formats 1.\n");
        return -1;
    }
    if (ctx->supports_vanc && ctx->dlo->EnableVideoOutput(ctx->bmd_mode, bmdVideoOutputVANC) != S_OK) {
        av_log(avctx, AV_LOG_WARNING, "Could not enable video output with VANC! Trying without...\n");
        ctx->supports_vanc = 0;
    }
    if (!ctx->supports_vanc && ctx->dlo->EnableVideoOutput(ctx->bmd_mode, bmdVideoOutputFlagDefault) != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not enable video output!\n");
        return -1;
    }

    /* Set callback. */
    ctx->output_callback = new decklink_output_callback();
    ctx->dlo->SetScheduledFrameCompletionCallback(ctx->output_callback);

    /* The preroll option is given in seconds; convert it to a frame count.
     * Time bases with denominators above 1000 (e.g. 30000/1001) carry an
     * extra factor of 1000. */
    ctx->frames_preroll = st->time_base.den * ctx->preroll;
    if (st->time_base.den > 1000)
        ctx->frames_preroll /= 1000;

    /* Buffer twice as many frames as the preroll. */
    ctx->frames_buffer = ctx->frames_preroll * 2;
    ctx->frames_buffer = FFMIN(ctx->frames_buffer, 60);
    pthread_mutex_init(&ctx->mutex, NULL);
    pthread_cond_init(&ctx->cond, NULL);
    ctx->frames_buffer_available_spots = ctx->frames_buffer;

    av_log(avctx, AV_LOG_DEBUG, "output: %s, preroll: %d, frames buffer size: %d\n",
           avctx->url, ctx->frames_preroll, ctx->frames_buffer);

    /* The device expects the framerate to be fixed. */
    avpriv_set_pts_info(st, 64, st->time_base.num, st->time_base.den);

    ctx->video = 1;

    return 0;
}

static int decklink_setup_audio(AVFormatContext *avctx, AVStream *st)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    AVCodecParameters *c = st->codecpar;

    if (ctx->audio) {
        av_log(avctx, AV_LOG_ERROR, "Only one audio stream is supported!\n");
        return -1;
    }
    if (c->sample_rate != 48000) {
        av_log(avctx, AV_LOG_ERROR, "Unsupported sample rate!"
               " Only 48kHz is supported.\n");
        return -1;
    }
    if (c->channels != 2 && c->channels != 8 && c->channels != 16) {
        av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels!"
               " Only 2, 8 or 16 channels are supported.\n");
        return -1;
    }
    if (ctx->dlo->EnableAudioOutput(bmdAudioSampleRate48kHz,
                                    bmdAudioSampleType16bitInteger,
                                    c->channels,
                                    bmdAudioOutputStreamTimestamped) != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not enable audio output!\n");
        return -1;
    }
    if (ctx->dlo->BeginAudioPreroll() != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not begin audio preroll!\n");
        return -1;
    }

    /* The device expects the sample rate to be fixed. */
    avpriv_set_pts_info(st, 64, 1, c->sample_rate);
    ctx->channels = c->channels;

    ctx->audio = 1;

    return 0;
}

av_cold int ff_decklink_write_trailer(AVFormatContext *avctx)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;

    if (ctx->playback_started) {
        BMDTimeValue actual;
        ctx->dlo->StopScheduledPlayback(ctx->last_pts * ctx->bmd_tb_num,
                                        &actual, ctx->bmd_tb_den);
        ctx->dlo->DisableVideoOutput();
        if (ctx->audio)
            ctx->dlo->DisableAudioOutput();
    }

    ff_decklink_cleanup(avctx);

    if (ctx->output_callback)
        delete ctx->output_callback;

    pthread_mutex_destroy(&ctx->mutex);
    pthread_cond_destroy(&ctx->cond);

#if CONFIG_LIBKLVANC
    klvanc_context_destroy(ctx->vanc_ctx);
#endif

    av_freep(&cctx->ctx);

    return 0;
}

#if CONFIG_LIBKLVANC
static void construct_cc(AVFormatContext *avctx, struct decklink_ctx *ctx,
                         AVPacket *pkt, struct klvanc_line_set_s *vanc_lines)
{
    struct klvanc_packet_eia_708b_s *cdp;
    uint16_t *cdp_words;
    uint16_t len;
    uint8_t cc_count;
    int size, ret, i;

    const uint8_t *data = av_packet_get_side_data(pkt, AV_PKT_DATA_A53_CC, &size);
    if (!data)
        return;

    cc_count = size / 3;

    ret = klvanc_create_eia708_cdp(&cdp);
    if (ret)
        return;

    ret = klvanc_set_framerate_EIA_708B(cdp, ctx->bmd_tb_num, ctx->bmd_tb_den);
    if (ret) {
        av_log(avctx, AV_LOG_ERROR, "Invalid framerate specified: %lld/%lld\n",
               ctx->bmd_tb_num, ctx->bmd_tb_den);
        klvanc_destroy_eia708_cdp(cdp);
        return;
    }

    if (cc_count > KLVANC_MAX_CC_COUNT) {
        av_log(avctx, AV_LOG_ERROR, "Illegal cc_count received: %d\n", cc_count);
        cc_count = KLVANC_MAX_CC_COUNT;
    }

    /* CC data */
    cdp->header.ccdata_present = 1;
    cdp->header.caption_service_active = 1;
    cdp->ccdata.cc_count = cc_count;
    for (i = 0; i < cc_count; i++) {
        if (data[3*i] & 0x04)
            cdp->ccdata.cc[i].cc_valid = 1;
        cdp->ccdata.cc[i].cc_type = data[3*i] & 0x03;
        cdp->ccdata.cc[i].cc_data[0] = data[3*i+1];
        cdp->ccdata.cc[i].cc_data[1] = data[3*i+2];
    }

    klvanc_finalize_EIA_708B(cdp, ctx->cdp_sequence_num++);
    ret = klvanc_convert_EIA_708B_to_words(cdp, &cdp_words, &len);
    klvanc_destroy_eia708_cdp(cdp);
    if (ret != 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed converting 708 packet to words\n");
        return;
    }

    ret = klvanc_line_insert(ctx->vanc_ctx, vanc_lines, cdp_words, len, 11, 0);
    free(cdp_words);
    if (ret != 0) {
        av_log(avctx, AV_LOG_ERROR, "VANC line insertion failed\n");
        return;
    }
}

static int decklink_construct_vanc(AVFormatContext *avctx, struct decklink_ctx *ctx,
                                   AVPacket *pkt, decklink_frame *frame)
{
    struct klvanc_line_set_s vanc_lines = { 0 };
    int ret = 0, i;

    if (!ctx->supports_vanc)
        return 0;

    construct_cc(avctx, ctx, pkt, &vanc_lines);

    IDeckLinkVideoFrameAncillary *vanc;
    int result = ctx->dlo->CreateAncillaryData(bmdFormat10BitYUV, &vanc);
    if (result != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create vanc\n");
        ret = AVERROR(EIO);
        goto done;
    }

    /* Now that we've got all the VANC lines in a nice orderly manner, generate the
       final VANC sections for the Decklink output */
    for (i = 0; i < vanc_lines.num_lines; i++) {
        struct klvanc_line_s *line = vanc_lines.lines[i];
        int real_line;
        void *buf;

        if (!line)
            break;

        /* FIXME: include hack for certain Decklink cards which mis-represent
           line numbers for pSF frames */
        real_line = line->line_number;

        result = vanc->GetBufferForVerticalBlankingLine(real_line, &buf);
        if (result != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Failed to get VANC line %d: %d", real_line, result);
            continue;
        }

        /* Generate the full line taking into account all VANC packets on that line */
        result = klvanc_generate_vanc_line_v210(ctx->vanc_ctx, line, (uint8_t *) buf,
                                                ctx->bmd_width);
        if (result) {
            av_log(avctx, AV_LOG_ERROR, "Failed to generate VANC line\n");
            continue;
        }
    }

    result = frame->SetAncillaryData(vanc);
    vanc->Release();
    if (result != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Failed to set vanc: %d", result);
        ret = AVERROR(EIO);
    }

done:
    for (i = 0; i < vanc_lines.num_lines; i++)
        klvanc_line_free(vanc_lines.lines[i]);

    return ret;
}
#endif

static int decklink_write_video_packet(AVFormatContext *avctx, AVPacket *pkt)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    AVStream *st = avctx->streams[pkt->stream_index];
    AVFrame *avframe = NULL, *tmp = (AVFrame *)pkt->data;
    AVPacket *avpacket = NULL;
    decklink_frame *frame;
    uint32_t buffered;
    HRESULT hr;

    if (st->codecpar->codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
        if (tmp->format != AV_PIX_FMT_UYVY422 ||
            tmp->width  != ctx->bmd_width ||
            tmp->height != ctx->bmd_height) {
            av_log(avctx, AV_LOG_ERROR, "Got a frame with invalid pixel format or dimension.\n");
            return AVERROR(EINVAL);
        }

        avframe = av_frame_clone(tmp);
        if (!avframe) {
            av_log(avctx, AV_LOG_ERROR, "Could not clone video frame.\n");
            return AVERROR(EIO);
        }

        frame = new decklink_frame(ctx, avframe, st->codecpar->codec_id, avframe->height, avframe->width);
    } else {
        avpacket = av_packet_clone(pkt);
        if (!avpacket) {
            av_log(avctx, AV_LOG_ERROR, "Could not clone video frame.\n");
            return AVERROR(EIO);
        }

        frame = new decklink_frame(ctx, avpacket, st->codecpar->codec_id, ctx->bmd_height, ctx->bmd_width);

#if CONFIG_LIBKLVANC
        if (decklink_construct_vanc(avctx, ctx, pkt, frame))
            av_log(avctx, AV_LOG_ERROR, "Failed to construct VANC\n");
#endif
    }

    if (!frame) {
        av_log(avctx, AV_LOG_ERROR, "Could not create new frame.\n");
        av_frame_free(&avframe);
        av_packet_free(&avpacket);
        return AVERROR(EIO);
    }

    /* Always keep at most one second of frames buffered. */
    pthread_mutex_lock(&ctx->mutex);
    while (ctx->frames_buffer_available_spots == 0) {
        pthread_cond_wait(&ctx->cond, &ctx->mutex);
    }
    ctx->frames_buffer_available_spots--;
    pthread_mutex_unlock(&ctx->mutex);

    /* Schedule frame for playback. */
    hr = ctx->dlo->ScheduleVideoFrame((class IDeckLinkVideoFrame *) frame,
                                      pkt->pts * ctx->bmd_tb_num,
                                      ctx->bmd_tb_num, ctx->bmd_tb_den);
    /* Pass ownership to DeckLink, or release on failure */
    frame->Release();
    if (hr != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not schedule video frame."
               " error %08x.\n", (uint32_t) hr);
        return AVERROR(EIO);
    }

    ctx->dlo->GetBufferedVideoFrameCount(&buffered);
    av_log(avctx, AV_LOG_DEBUG, "Buffered video frames: %d.\n", (int) buffered);
    if (pkt->pts > 2 && buffered <= 2)
        av_log(avctx, AV_LOG_WARNING, "There are not enough buffered video frames."
               " Video may misbehave!\n");

    /* Preroll video frames. */
    if (!ctx->playback_started && pkt->pts > ctx->frames_preroll) {
        av_log(avctx, AV_LOG_DEBUG, "Ending audio preroll.\n");
        if (ctx->audio && ctx->dlo->EndAudioPreroll() != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Could not end audio preroll!\n");
            return AVERROR(EIO);
        }
        av_log(avctx, AV_LOG_DEBUG, "Starting scheduled playback.\n");
        if (ctx->dlo->StartScheduledPlayback(0, ctx->bmd_tb_den, 1.0) != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Could not start scheduled playback!\n");
            return AVERROR(EIO);
        }
        ctx->playback_started = 1;
    }

    return 0;
}

static int decklink_write_audio_packet(AVFormatContext *avctx, AVPacket *pkt)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    int sample_count = pkt->size / (ctx->channels << 1);
    uint32_t buffered;

    ctx->dlo->GetBufferedAudioSampleFrameCount(&buffered);
    if (pkt->pts > 1 && !buffered)
        av_log(avctx, AV_LOG_WARNING, "There's no buffered audio."
               " Audio will misbehave!\n");

    if (ctx->dlo->ScheduleAudioSamples(pkt->data, sample_count, pkt->pts,
                                       bmdAudioSampleRate48kHz, NULL) != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not schedule audio samples.\n");
        return AVERROR(EIO);
    }

    return 0;
}

extern "C" {

av_cold int ff_decklink_write_header(AVFormatContext *avctx)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx;
    unsigned int n;
    int ret;

    ctx = (struct decklink_ctx *) av_mallocz(sizeof(struct decklink_ctx));
    if (!ctx)
        return AVERROR(ENOMEM);
    ctx->list_devices = cctx->list_devices;
    ctx->list_formats = cctx->list_formats;
    ctx->preroll      = cctx->preroll;
    ctx->duplex_mode  = cctx->duplex_mode;
    cctx->ctx = ctx;
#if CONFIG_LIBKLVANC
    if (klvanc_context_create(&ctx->vanc_ctx) < 0) {
        av_log(avctx, AV_LOG_ERROR, "Cannot create VANC library context\n");
        return AVERROR(ENOMEM);
    }
    ctx->supports_vanc = 1;
#endif

    /* List available devices and exit. */
    if (ctx->list_devices) {
        ff_decklink_list_devices_legacy(avctx, 0, 1);
        return AVERROR_EXIT;
    }

    ret = ff_decklink_init_device(avctx, avctx->url);
    if (ret < 0)
        return ret;

    /* Get output device. */
    if (ctx->dl->QueryInterface(IID_IDeckLinkOutput, (void **) &ctx->dlo) != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not open output device from '%s'\n",
               avctx->url);
        ret = AVERROR(EIO);
        goto error;
    }

    /* List supported formats. */
    if (ctx->list_formats) {
        ff_decklink_list_formats(avctx);
        ret = AVERROR_EXIT;
        goto error;
    }

    /* Setup streams. */
    ret = AVERROR(EIO);
    for (n = 0; n < avctx->nb_streams; n++) {
        AVStream *st = avctx->streams[n];
        AVCodecParameters *c = st->codecpar;
        if (c->codec_type == AVMEDIA_TYPE_AUDIO) {
            if (decklink_setup_audio(avctx, st))
                goto error;
        } else if (c->codec_type == AVMEDIA_TYPE_VIDEO) {
            if (decklink_setup_video(avctx, st))
                goto error;
        } else {
            av_log(avctx, AV_LOG_ERROR, "Unsupported stream type.\n");
            goto error;
        }
    }

    return 0;

error:
    ff_decklink_cleanup(avctx);
    return ret;
}

int ff_decklink_write_packet(AVFormatContext *avctx, AVPacket *pkt)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    AVStream *st = avctx->streams[pkt->stream_index];

    ctx->last_pts = FFMAX(ctx->last_pts, pkt->pts);

    if      (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        return decklink_write_video_packet(avctx, pkt);
    else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
        return decklink_write_audio_packet(avctx, pkt);

    return AVERROR(EIO);
}

int ff_decklink_list_output_devices(AVFormatContext *avctx, struct AVDeviceInfoList *device_list)
{
    return ff_decklink_list_devices(avctx, device_list, 0, 1);
}

} /* extern "C" */