FFmpeg: decklink_enc.cpp
1 /*
2  * Blackmagic DeckLink output
3  * Copyright (c) 2013-2014 Ramiro Polla
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 #include <atomic>
23 using std::atomic;
24 
25 /* Include internal.h first to avoid conflict between winsock.h (used by
26  * DeckLink headers) and winsock2.h (used by libavformat) in MSVC++ builds */
27 extern "C" {
28 #include "libavformat/internal.h"
29 }
30 
31 #include <DeckLinkAPI.h>
32 
33 extern "C" {
34 #include "libavformat/avformat.h"
35 #include "libavcodec/bytestream.h"
36 #include "libavutil/internal.h"
37 #include "libavutil/imgutils.h"
38 #include "avdevice.h"
39 }
40 
41 #include "decklink_common.h"
42 #include "decklink_enc.h"
43 #if CONFIG_LIBKLVANC
44 #include "libklvanc/vanc.h"
45 #include "libklvanc/vanc-lines.h"
46 #include "libklvanc/pixels.h"
47 #endif
48 
49 /* DeckLink callback class declaration */
50 class decklink_frame : public IDeckLinkVideoFrame
51 {
52 public:
53  decklink_frame(struct decklink_ctx *ctx, AVFrame *avframe, AVCodecID codec_id, int height, int width) :
54  _ctx(ctx), _avframe(avframe), _avpacket(NULL), _codec_id(codec_id), _height(height), _width(width), _refs(1) { }
55  decklink_frame(struct decklink_ctx *ctx, AVPacket *avpacket, AVCodecID codec_id, int height, int width) :
56  _ctx(ctx), _avframe(NULL), _avpacket(avpacket), _codec_id(codec_id), _height(height), _width(width), _refs(1) { }
57  virtual long STDMETHODCALLTYPE GetWidth (void) { return _width; }
58  virtual long STDMETHODCALLTYPE GetHeight (void) { return _height; }
59  virtual long STDMETHODCALLTYPE GetRowBytes (void)
60  {
61  if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
62  return _avframe->linesize[0] < 0 ? -_avframe->linesize[0] : _avframe->linesize[0];
63  else
64  return ((GetWidth() + 47) / 48) * 128;
65  }
66  virtual BMDPixelFormat STDMETHODCALLTYPE GetPixelFormat(void)
67  {
68  if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
69  return bmdFormat8BitYUV;
70  else
71  return bmdFormat10BitYUV;
72  }
73  virtual BMDFrameFlags STDMETHODCALLTYPE GetFlags (void)
74  {
75  if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
76  return _avframe->linesize[0] < 0 ? bmdFrameFlagFlipVertical : bmdFrameFlagDefault;
77  else
78  return bmdFrameFlagDefault;
79  }
80 
81  virtual HRESULT STDMETHODCALLTYPE GetBytes (void **buffer)
82  {
83  if (_avframe) {
84  if (_avframe->linesize[0] < 0)
85  *buffer = (void *)(_avframe->data[0] + _avframe->linesize[0] * (_avframe->height - 1));
86  else
87  *buffer = (void *)(_avframe->data[0]);
88  } else {
89  *buffer = (void *)(_avpacket->data);
90  }
91  return S_OK;
92  }
93 
94  virtual HRESULT STDMETHODCALLTYPE GetTimecode (BMDTimecodeFormat format, IDeckLinkTimecode **timecode) { return S_FALSE; }
95  virtual HRESULT STDMETHODCALLTYPE GetAncillaryData(IDeckLinkVideoFrameAncillary **ancillary)
96  {
97  *ancillary = _ancillary;
98  if (_ancillary) {
99  _ancillary->AddRef();
100  return S_OK;
101  } else {
102  return S_FALSE;
103  }
104  }
105  virtual HRESULT STDMETHODCALLTYPE SetAncillaryData(IDeckLinkVideoFrameAncillary *ancillary)
106  {
107  if (_ancillary)
108  _ancillary->Release();
109  _ancillary = ancillary;
110  _ancillary->AddRef();
111  return S_OK;
112  }
113  virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID *ppv) { return E_NOINTERFACE; }
114  virtual ULONG STDMETHODCALLTYPE AddRef(void) { return ++_refs; }
115  virtual ULONG STDMETHODCALLTYPE Release(void)
116  {
117  int ret = --_refs;
118  if (!ret) {
119  av_frame_free(&_avframe);
120  av_packet_free(&_avpacket);
121  if (_ancillary)
122  _ancillary->Release();
123  delete this;
124  }
125  return ret;
126  }
127 
128  struct decklink_ctx *_ctx;
129  AVFrame *_avframe;
130  AVPacket *_avpacket;
131  enum AVCodecID _codec_id;
132  IDeckLinkVideoFrameAncillary *_ancillary;
133  int _height;
134  int _width;
135 
136 private:
137  std::atomic<int> _refs;
138 };
139 
140 class decklink_output_callback : public IDeckLinkVideoOutputCallback
141 {
142 public:
143  virtual HRESULT STDMETHODCALLTYPE ScheduledFrameCompleted(IDeckLinkVideoFrame *_frame, BMDOutputFrameCompletionResult result)
144  {
145  decklink_frame *frame = static_cast<decklink_frame *>(_frame);
146  struct decklink_ctx *ctx = frame->_ctx;
147 
148  if (frame->_avframe)
149  av_frame_unref(frame->_avframe);
150  if (frame->_avpacket)
151  av_packet_unref(frame->_avpacket);
152 
153  pthread_mutex_lock(&ctx->mutex);
154  ctx->frames_buffer_available_spots++;
155  pthread_cond_broadcast(&ctx->cond);
156  pthread_mutex_unlock(&ctx->mutex);
157 
158  return S_OK;
159  }
160  virtual HRESULT STDMETHODCALLTYPE ScheduledPlaybackHasStopped(void) { return S_OK; }
161  virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID *ppv) { return E_NOINTERFACE; }
162  virtual ULONG STDMETHODCALLTYPE AddRef(void) { return 1; }
163  virtual ULONG STDMETHODCALLTYPE Release(void) { return 1; }
164 };
165 
166 static int decklink_setup_video(AVFormatContext *avctx, AVStream *st)
167 {
168  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
169  struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
170  AVCodecParameters *c = st->codecpar;
171 
172  if (ctx->video) {
173  av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n");
174  return -1;
175  }
176 
177  if (c->codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
178  if (c->format != AV_PIX_FMT_UYVY422) {
179  av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format!"
180  " Only AV_PIX_FMT_UYVY422 is supported.\n");
181  return -1;
182  }
183  ctx->raw_format = bmdFormat8BitYUV;
184  } else if (c->codec_id != AV_CODEC_ID_V210) {
185  av_log(avctx, AV_LOG_ERROR, "Unsupported codec type!"
186  " Only V210 and wrapped frame with AV_PIX_FMT_UYVY422 are supported.\n");
187  return -1;
188  } else {
189  ctx->raw_format = bmdFormat10BitYUV;
190  }
191 
192  if (ff_decklink_set_configs(avctx, DIRECTION_OUT) < 0) {
193  av_log(avctx, AV_LOG_ERROR, "Could not set output configuration\n");
194  return -1;
195  }
196  if (ff_decklink_set_format(avctx, c->width, c->height,
197  st->time_base.num, st->time_base.den, c->field_order)) {
198  av_log(avctx, AV_LOG_ERROR, "Unsupported video size, framerate or field order!"
199  " Check available formats with -list_formats 1.\n");
200  return -1;
201  }
202  if (ctx->supports_vanc && ctx->dlo->EnableVideoOutput(ctx->bmd_mode, bmdVideoOutputVANC) != S_OK) {
203  av_log(avctx, AV_LOG_WARNING, "Could not enable video output with VANC! Trying without...\n");
204  ctx->supports_vanc = 0;
205  }
206  if (!ctx->supports_vanc && ctx->dlo->EnableVideoOutput(ctx->bmd_mode, bmdVideoOutputFlagDefault) != S_OK) {
207  av_log(avctx, AV_LOG_ERROR, "Could not enable video output!\n");
208  return -1;
209  }
210 
211  /* Set callback. */
212  ctx->output_callback = new decklink_output_callback();
213  ctx->dlo->SetScheduledFrameCompletionCallback(ctx->output_callback);
214 
215  ctx->frames_preroll = st->time_base.den * ctx->preroll;
216  if (st->time_base.den > 1000)
217  ctx->frames_preroll /= 1000;
218 
219  /* Buffer twice as many frames as the preroll. */
220  ctx->frames_buffer = ctx->frames_preroll * 2;
221  ctx->frames_buffer = FFMIN(ctx->frames_buffer, 60);
222  pthread_mutex_init(&ctx->mutex, NULL);
223  pthread_cond_init(&ctx->cond, NULL);
224  ctx->frames_buffer_available_spots = ctx->frames_buffer;
225 
226  av_log(avctx, AV_LOG_DEBUG, "output: %s, preroll: %d, frames buffer size: %d\n",
227  avctx->url, ctx->frames_preroll, ctx->frames_buffer);
228 
229  /* The device expects the framerate to be fixed. */
230  avpriv_set_pts_info(st, 64, st->time_base.num, st->time_base.den);
231 
232  ctx->video = 1;
233 
234  return 0;
235 }
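/* Worked example (illustrative, assuming the option defaults): a 29.97 fps stream has
 * time_base 1001/30000, so time_base.den = 30000. With a preroll of 0.5 seconds,
 * frames_preroll = 30000 * 0.5 / 1000 = 15 and frames_buffer = FFMIN(2 * 15, 60) = 30,
 * i.e. roughly one second of video may be queued towards the DeckLink driver. */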
236 
237 static int decklink_setup_audio(AVFormatContext *avctx, AVStream *st)
238 {
239  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
240  struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
241  AVCodecParameters *c = st->codecpar;
242 
243  if (ctx->audio) {
244  av_log(avctx, AV_LOG_ERROR, "Only one audio stream is supported!\n");
245  return -1;
246  }
247 
248  if (c->codec_id == AV_CODEC_ID_AC3) {
249  /* Regardless of the number of channels in the codec, we're only
250  using 2 SDI audio channels at 48000Hz */
251  ctx->channels = 2;
252  } else if (c->codec_id == AV_CODEC_ID_PCM_S16LE) {
253  if (c->sample_rate != 48000) {
254  av_log(avctx, AV_LOG_ERROR, "Unsupported sample rate!"
255  " Only 48kHz is supported.\n");
256  return -1;
257  }
258  if (c->ch_layout.nb_channels != 2 && c->ch_layout.nb_channels != 8 && c->ch_layout.nb_channels != 16) {
259  av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels!"
260  " Only 2, 8 or 16 channels are supported.\n");
261  return -1;
262  }
263  ctx->channels = c->ch_layout.nb_channels;
264  } else {
265  av_log(avctx, AV_LOG_ERROR, "Unsupported codec specified!"
266  " Only PCM_S16LE and AC-3 are supported.\n");
267  return -1;
268  }
269 
270  if (ctx->dlo->EnableAudioOutput(bmdAudioSampleRate48kHz,
271  bmdAudioSampleType16bitInteger,
272  ctx->channels,
273  bmdAudioOutputStreamTimestamped) != S_OK) {
274  av_log(avctx, AV_LOG_ERROR, "Could not enable audio output!\n");
275  return -1;
276  }
277  if (ctx->dlo->BeginAudioPreroll() != S_OK) {
278  av_log(avctx, AV_LOG_ERROR, "Could not begin audio preroll!\n");
279  return -1;
280  }
281 
282  /* The device expects the sample rate to be fixed. */
283  avpriv_set_pts_info(st, 64, 1, 48000);
284 
285  ctx->audio = 1;
286 
287  return 0;
288 }
289 
290 /* Wrap the AC-3 packet into an SMPTE ST 337 payload in S16LE format so that it can be
291  injected directly into the PCM stream. Note: despite the generic function name, only AC-3 is implemented. */
292 static int create_s337_payload(AVPacket *pkt, uint8_t **outbuf, int *outsize)
293 {
294  /* Note: if the packet size is not divisible by four, we need to make the actual
295  payload larger to ensure it ends on a two-channel S16LE boundary */
296  int payload_size = FFALIGN(pkt->size, 4) + 8;
297  uint16_t bitcount = pkt->size * 8;
298  uint8_t *s337_payload;
299  PutByteContext pb;
300 
301  /* Sanity check: According to SMPTE ST 340:2015 Sec 4.1, the AC-3 sync frame will
302  exactly match the 1536 samples of baseband (PCM) audio that it represents. */
303  if (pkt->size > 1536)
304  return AVERROR(EINVAL);
305 
306  /* Encapsulate AC3 syncframe into SMPTE 337 packet */
307  s337_payload = (uint8_t *) av_malloc(payload_size);
308  if (s337_payload == NULL)
309  return AVERROR(ENOMEM);
310  bytestream2_init_writer(&pb, s337_payload, payload_size);
311  bytestream2_put_le16u(&pb, 0xf872); /* Sync word 1 */
312  bytestream2_put_le16u(&pb, 0x4e1f); /* Sync word 2 */
313  bytestream2_put_le16u(&pb, 0x0001); /* Burst Info, including data type (1=ac3) */
314  bytestream2_put_le16u(&pb, bitcount); /* Length code */
315  for (int i = 0; i < (pkt->size - 1); i += 2)
316  bytestream2_put_le16u(&pb, (pkt->data[i] << 8) | pkt->data[i+1]);
317 
318  /* Ensure final payload is aligned on a 4-byte boundary */
319  if (pkt->size & 1)
320  bytestream2_put_le16u(&pb, pkt->data[pkt->size - 1] << 8);
321  if ((pkt->size & 3) == 1 || (pkt->size & 3) == 2)
322  bytestream2_put_le16u(&pb, 0);
323 
324  *outsize = payload_size;
325  *outbuf = s337_payload;
326  return 0;
327 }
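/* Worked example (illustrative): for a full-size 1536-byte AC-3 syncframe the function
 * above yields payload_size = FFALIGN(1536, 4) + 8 = 1544 bytes and
 * bitcount = 1536 * 8 = 12288 (0x3000). The burst preamble written is therefore the
 * little-endian words 0xf872, 0x4e1f (sync), 0x0001 (data type 1 = AC-3) and 0x3000
 * (length in bits), followed by the byte-swapped AC-3 data. */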
328 
329 static int decklink_setup_subtitle(AVFormatContext *avctx, AVStream *st)
330 {
331  int ret = -1;
332 
333  switch(st->codecpar->codec_id) {
334 #if CONFIG_LIBKLVANC
335  case AV_CODEC_ID_EIA_608:
336  /* No special setup required */
337  ret = 0;
338  break;
339 #endif
340  default:
341  av_log(avctx, AV_LOG_ERROR, "Unsupported subtitle codec specified\n");
342  break;
343  }
344 
345  return ret;
346 }
347 
348 av_cold int ff_decklink_write_trailer(AVFormatContext *avctx)
349 {
350  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
351  struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
352 
353  if (ctx->playback_started) {
354  BMDTimeValue actual;
355  ctx->dlo->StopScheduledPlayback(ctx->last_pts * ctx->bmd_tb_num,
356  &actual, ctx->bmd_tb_den);
357  ctx->dlo->DisableVideoOutput();
358  if (ctx->audio)
359  ctx->dlo->DisableAudioOutput();
360  }
361 
362  ff_decklink_cleanup(avctx);
363 
364  if (ctx->output_callback)
365  delete ctx->output_callback;
366 
367  pthread_mutex_destroy(&ctx->mutex);
368  pthread_cond_destroy(&ctx->cond);
369 
370 #if CONFIG_LIBKLVANC
371  klvanc_context_destroy(ctx->vanc_ctx);
372 #endif
373 
374  ff_ccfifo_uninit(&ctx->cc_fifo);
375  av_freep(&cctx->ctx);
376 
377  return 0;
378 }
379 
380 #if CONFIG_LIBKLVANC
381 static void construct_cc(AVFormatContext *avctx, struct decklink_ctx *ctx,
382  AVPacket *pkt, struct klvanc_line_set_s *vanc_lines)
383 {
384  struct klvanc_packet_eia_708b_s *cdp;
385  uint16_t *cdp_words;
386  uint16_t len;
387  uint8_t cc_count;
388  size_t size;
389  int ret, i;
390 
391  const uint8_t *data = av_packet_get_side_data(pkt, AV_PKT_DATA_A53_CC, &size);
392  if (!data)
393  return;
394 
395  cc_count = size / 3;
396 
397  ret = klvanc_create_eia708_cdp(&cdp);
398  if (ret)
399  return;
400 
401  ret = klvanc_set_framerate_EIA_708B(cdp, ctx->bmd_tb_num, ctx->bmd_tb_den);
402  if (ret) {
403  av_log(avctx, AV_LOG_ERROR, "Invalid framerate specified: %lld/%lld\n",
404  ctx->bmd_tb_num, ctx->bmd_tb_den);
405  klvanc_destroy_eia708_cdp(cdp);
406  return;
407  }
408 
409  if (cc_count > KLVANC_MAX_CC_COUNT) {
410  av_log(avctx, AV_LOG_ERROR, "Illegal cc_count received: %d\n", cc_count);
411  cc_count = KLVANC_MAX_CC_COUNT;
412  }
413 
414  /* CC data */
415  cdp->header.ccdata_present = 1;
416  cdp->header.caption_service_active = 1;
417  cdp->ccdata.cc_count = cc_count;
418  for (i = 0; i < cc_count; i++) {
419  if (data [3*i] & 0x04)
420  cdp->ccdata.cc[i].cc_valid = 1;
421  cdp->ccdata.cc[i].cc_type = data[3*i] & 0x03;
422  cdp->ccdata.cc[i].cc_data[0] = data[3*i+1];
423  cdp->ccdata.cc[i].cc_data[1] = data[3*i+2];
424  }
425 
426  klvanc_finalize_EIA_708B(cdp, ctx->cdp_sequence_num++);
427  ret = klvanc_convert_EIA_708B_to_words(cdp, &cdp_words, &len);
428  klvanc_destroy_eia708_cdp(cdp);
429  if (ret != 0) {
430  av_log(avctx, AV_LOG_ERROR, "Failed converting 708 packet to words\n");
431  return;
432  }
433 
434  ret = klvanc_line_insert(ctx->vanc_ctx, vanc_lines, cdp_words, len, 11, 0);
435  free(cdp_words);
436  if (ret != 0) {
437  av_log(avctx, AV_LOG_ERROR, "VANC line insertion failed\n");
438  return;
439  }
440 }
441 
442 /* See SMPTE ST 2016-3:2009 */
443 static void construct_afd(AVFormatContext *avctx, struct decklink_ctx *ctx,
444  AVPacket *pkt, struct klvanc_line_set_s *vanc_lines,
445  AVStream *st)
446 {
447  struct klvanc_packet_afd_s *afd = NULL;
448  uint16_t *afd_words = NULL;
449  uint16_t len;
450  size_t size;
451  int f1_line = 12, f2_line = 0, ret;
452 
453  const uint8_t *data = av_packet_get_side_data(pkt, AV_PKT_DATA_AFD, &size);
454  if (!data || size == 0)
455  return;
456 
457  ret = klvanc_create_AFD(&afd);
458  if (ret)
459  return;
460 
461  ret = klvanc_set_AFD_val(afd, data[0]);
462  if (ret) {
463  av_log(avctx, AV_LOG_ERROR, "Invalid AFD value specified: %d\n",
464  data[0]);
465  klvanc_destroy_AFD(afd);
466  return;
467  }
468 
469  /* Compute the AR flag based on the DAR (see ST 2016-1:2009 Sec 9.1). Note, we treat
470  anything below 1.4 as 4:3 (as opposed to the standard 1.33), because there are lots
471  of streams in the field that aren't *exactly* 4:3 but a tiny bit larger after doing
472  the math... */
473  if (av_cmp_q((AVRational) {st->codecpar->width * st->codecpar->sample_aspect_ratio.num,
474  st->codecpar->height * st->codecpar->sample_aspect_ratio.den}, (AVRational) {14, 10}) == 1)
475  afd->aspectRatio = ASPECT_16x9;
476  else
477  afd->aspectRatio = ASPECT_4x3;
478 
479  ret = klvanc_convert_AFD_to_words(afd, &afd_words, &len);
480  if (ret) {
481  av_log(avctx, AV_LOG_ERROR, "Failed converting AFD packet to words\n");
482  goto out;
483  }
484 
485  ret = klvanc_line_insert(ctx->vanc_ctx, vanc_lines, afd_words, len, f1_line, 0);
486  if (ret) {
487  av_log(avctx, AV_LOG_ERROR, "VANC line insertion failed\n");
488  goto out;
489  }
490 
491  /* For interlaced video, insert into both fields. Switching lines for field 2
492  derived from SMPTE RP 168:2009, Sec 6, Table 2. */
493  switch (ctx->bmd_mode) {
494  case bmdModeNTSC:
495  case bmdModeNTSC2398:
496  f2_line = 273 - 10 + f1_line;
497  break;
498  case bmdModePAL:
499  f2_line = 319 - 6 + f1_line;
500  break;
501  case bmdModeHD1080i50:
502  case bmdModeHD1080i5994:
503  case bmdModeHD1080i6000:
504  f2_line = 569 - 7 + f1_line;
505  break;
506  default:
507  f2_line = 0;
508  break;
509  }
510 
511  if (f2_line > 0) {
512  ret = klvanc_line_insert(ctx->vanc_ctx, vanc_lines, afd_words, len, f2_line, 0);
513  if (ret) {
514  av_log(avctx, AV_LOG_ERROR, "VANC line insertion failed\n");
515  goto out;
516  }
517  }
518 
519 out:
520  if (afd)
521  klvanc_destroy_AFD(afd);
522  if (afd_words)
523  free(afd_words);
524 }
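/* Worked example (illustrative): the av_cmp_q() check above compares the display aspect
 * ratio against 14:10 (1.4). 1920x1080 with a 1:1 SAR gives 16:9 (~1.78) > 1.4, hence
 * ASPECT_16x9; 720x576 with a 16:15 SAR gives (720*16)/(576*15) = 4:3 (~1.33) < 1.4,
 * hence ASPECT_4x3. */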
525 
526 /* Parse any EIA-608 subtitles sitting on the queue, and write packet side data
527  that will later be handled by construct_cc... */
528 static void parse_608subs(AVFormatContext *avctx, struct decklink_ctx *ctx, AVPacket *pkt)
529 {
530  size_t cc_size = ff_ccfifo_getoutputsize(&ctx->cc_fifo);
531  uint8_t *cc_data;
532 
533  if (!ff_ccfifo_ccdetected(&ctx->cc_fifo))
534  return;
535 
536  cc_data = av_packet_new_side_data(pkt, AV_PKT_DATA_A53_CC, cc_size);
537  if (cc_data)
538  ff_ccfifo_injectbytes(&ctx->cc_fifo, cc_data, cc_size);
539 }
540 
541 static int decklink_construct_vanc(AVFormatContext *avctx, struct decklink_ctx *ctx,
542  AVPacket *pkt, decklink_frame *frame,
543  AVStream *st)
544 {
545  struct klvanc_line_set_s vanc_lines = { 0 };
546  int ret = 0, i;
547 
548  if (!ctx->supports_vanc)
549  return 0;
550 
551  parse_608subs(avctx, ctx, pkt);
552  construct_cc(avctx, ctx, pkt, &vanc_lines);
553  construct_afd(avctx, ctx, pkt, &vanc_lines, st);
554 
555  IDeckLinkVideoFrameAncillary *vanc;
556  int result = ctx->dlo->CreateAncillaryData(bmdFormat10BitYUV, &vanc);
557  if (result != S_OK) {
558  av_log(avctx, AV_LOG_ERROR, "Failed to create vanc\n");
559  ret = AVERROR(EIO);
560  goto done;
561  }
562 
563  /* Now that we've got all the VANC lines in a nice orderly manner, generate the
564  final VANC sections for the Decklink output */
565  for (i = 0; i < vanc_lines.num_lines; i++) {
566  struct klvanc_line_s *line = vanc_lines.lines[i];
567  int real_line;
568  void *buf;
569 
570  if (!line)
571  break;
572 
573  /* FIXME: include hack for certain Decklink cards which mis-represent
574  line numbers for pSF frames */
575  real_line = line->line_number;
576 
577  result = vanc->GetBufferForVerticalBlankingLine(real_line, &buf);
578  if (result != S_OK) {
579  av_log(avctx, AV_LOG_ERROR, "Failed to get VANC line %d: %d", real_line, result);
580  continue;
581  }
582 
583  /* Generate the full line taking into account all VANC packets on that line */
584  result = klvanc_generate_vanc_line_v210(ctx->vanc_ctx, line, (uint8_t *) buf,
585  ctx->bmd_width);
586  if (result) {
587  av_log(avctx, AV_LOG_ERROR, "Failed to generate VANC line\n");
588  continue;
589  }
590  }
591 
592  result = frame->SetAncillaryData(vanc);
593  vanc->Release();
594  if (result != S_OK) {
595  av_log(avctx, AV_LOG_ERROR, "Failed to set vanc: %d", result);
596  ret = AVERROR(EIO);
597  }
598 
599 done:
600  for (i = 0; i < vanc_lines.num_lines; i++)
601  klvanc_line_free(vanc_lines.lines[i]);
602 
603  return ret;
604 }
605 #endif
606 
607 static int decklink_write_video_packet(AVFormatContext *avctx, AVPacket *pkt)
608 {
609  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
610  struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
611  AVStream *st = avctx->streams[pkt->stream_index];
612  AVFrame *avframe = NULL, *tmp = (AVFrame *)pkt->data;
613  AVPacket *avpacket = NULL;
614  decklink_frame *frame;
615  uint32_t buffered;
616  HRESULT hr;
617 
618  ctx->last_pts = FFMAX(ctx->last_pts, pkt->pts);
619 
620  if (st->codecpar->codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
621  if (tmp->format != AV_PIX_FMT_UYVY422 ||
622  tmp->width != ctx->bmd_width ||
623  tmp->height != ctx->bmd_height) {
624  av_log(avctx, AV_LOG_ERROR, "Got a frame with invalid pixel format or dimension.\n");
625  return AVERROR(EINVAL);
626  }
627 
628  avframe = av_frame_clone(tmp);
629  if (!avframe) {
630  av_log(avctx, AV_LOG_ERROR, "Could not clone video frame.\n");
631  return AVERROR(EIO);
632  }
633 
634  frame = new decklink_frame(ctx, avframe, st->codecpar->codec_id, avframe->height, avframe->width);
635  } else {
636  avpacket = av_packet_clone(pkt);
637  if (!avpacket) {
638  av_log(avctx, AV_LOG_ERROR, "Could not clone video frame.\n");
639  return AVERROR(EIO);
640  }
641 
642  frame = new decklink_frame(ctx, avpacket, st->codecpar->codec_id, ctx->bmd_height, ctx->bmd_width);
643 
644 #if CONFIG_LIBKLVANC
645  if (decklink_construct_vanc(avctx, ctx, pkt, frame, st))
646  av_log(avctx, AV_LOG_ERROR, "Failed to construct VANC\n");
647 #endif
648  }
649 
650  if (!frame) {
651  av_log(avctx, AV_LOG_ERROR, "Could not create new frame.\n");
652  av_frame_free(&avframe);
653  av_packet_free(&avpacket);
654  return AVERROR(EIO);
655  }
656 
657  /* Always keep at most one second of frames buffered. */
658  pthread_mutex_lock(&ctx->mutex);
659  while (ctx->frames_buffer_available_spots == 0) {
660  pthread_cond_wait(&ctx->cond, &ctx->mutex);
661  }
662  ctx->frames_buffer_available_spots--;
663  pthread_mutex_unlock(&ctx->mutex);
664 
665  if (ctx->first_pts == AV_NOPTS_VALUE)
666  ctx->first_pts = pkt->pts;
667 
668  /* Schedule frame for playback. */
669  hr = ctx->dlo->ScheduleVideoFrame((class IDeckLinkVideoFrame *) frame,
670  pkt->pts * ctx->bmd_tb_num,
671  ctx->bmd_tb_num, ctx->bmd_tb_den);
672  /* Pass ownership to DeckLink, or release on failure */
673  frame->Release();
674  if (hr != S_OK) {
675  av_log(avctx, AV_LOG_ERROR, "Could not schedule video frame."
676  " error %08x.\n", (uint32_t) hr);
677  return AVERROR(EIO);
678  }
679 
680  ctx->dlo->GetBufferedVideoFrameCount(&buffered);
681  av_log(avctx, AV_LOG_DEBUG, "Buffered video frames: %d.\n", (int) buffered);
682  if (pkt->pts > 2 && buffered <= 2)
683  av_log(avctx, AV_LOG_WARNING, "There are not enough buffered video frames."
684  " Video may misbehave!\n");
685 
686  /* Preroll video frames. */
687  if (!ctx->playback_started && pkt->pts > (ctx->first_pts + ctx->frames_preroll)) {
688  av_log(avctx, AV_LOG_DEBUG, "Ending audio preroll.\n");
689  if (ctx->audio && ctx->dlo->EndAudioPreroll() != S_OK) {
690  av_log(avctx, AV_LOG_ERROR, "Could not end audio preroll!\n");
691  return AVERROR(EIO);
692  }
693  av_log(avctx, AV_LOG_DEBUG, "Starting scheduled playback.\n");
694  if (ctx->dlo->StartScheduledPlayback(ctx->first_pts * ctx->bmd_tb_num, ctx->bmd_tb_den, 1.0) != S_OK) {
695  av_log(avctx, AV_LOG_ERROR, "Could not start scheduled playback!\n");
696  return AVERROR(EIO);
697  }
698  ctx->playback_started = 1;
699  }
700 
701  return 0;
702 }
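/* Note (illustrative): ScheduleVideoFrame() above expects a start time of
 * pkt->pts * bmd_tb_num in units of 1/bmd_tb_den seconds. For example, if the selected
 * mode reports a frame duration of 1000 in a 25000 timescale (25 fps), frame 100 is
 * scheduled at 100 * 1000 = 100000 ticks, i.e. 4 seconds into playback. */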
703 
704 static int decklink_write_audio_packet(AVFormatContext *avctx, AVPacket *pkt)
705 {
706  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
707  struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
708  AVStream *st = avctx->streams[pkt->stream_index];
709  int sample_count;
710  uint32_t buffered;
711  uint8_t *outbuf = NULL;
712  int ret = 0;
713 
714  ctx->dlo->GetBufferedAudioSampleFrameCount(&buffered);
715  if (pkt->pts > 1 && !buffered)
716  av_log(avctx, AV_LOG_WARNING, "There's no buffered audio."
717  " Audio will misbehave!\n");
718 
719  if (st->codecpar->codec_id == AV_CODEC_ID_AC3) {
720  /* Encapsulate AC3 syncframe into SMPTE 337 packet */
721  int outbuf_size;
722  ret = create_s337_payload(pkt, &outbuf, &outbuf_size);
723  if (ret < 0)
724  return ret;
725  sample_count = outbuf_size / 4;
726  } else {
727  sample_count = pkt->size / (ctx->channels << 1);
728  outbuf = pkt->data;
729  }
730 
731  if (ctx->dlo->ScheduleAudioSamples(outbuf, sample_count, pkt->pts,
732  bmdAudioSampleRate48kHz, NULL) != S_OK) {
733  av_log(avctx, AV_LOG_ERROR, "Could not schedule audio samples.\n");
734  ret = AVERROR(EIO);
735  }
736 
737  if (st->codecpar->codec_id == AV_CODEC_ID_AC3)
738  av_freep(&outbuf);
739 
740  return ret;
741 }
742 
743 static int decklink_write_subtitle_packet(AVFormatContext *avctx, AVPacket *pkt)
744 {
745  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
746  struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
747 
748  ff_ccfifo_extractbytes(&ctx->cc_fifo, pkt->data, pkt->size);
749 
750  return 0;
751 }
752 
753 extern "C" {
754 
755 av_cold int ff_decklink_write_header(AVFormatContext *avctx)
756 {
757  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
758  struct decklink_ctx *ctx;
759  unsigned int n;
760  int ret;
761 
762  ctx = (struct decklink_ctx *) av_mallocz(sizeof(struct decklink_ctx));
763  if (!ctx)
764  return AVERROR(ENOMEM);
765  ctx->list_devices = cctx->list_devices;
766  ctx->list_formats = cctx->list_formats;
767  ctx->preroll = cctx->preroll;
768  ctx->duplex_mode = cctx->duplex_mode;
769  ctx->first_pts = AV_NOPTS_VALUE;
770  if (cctx->link > 0 && (unsigned int)cctx->link < FF_ARRAY_ELEMS(decklink_link_conf_map))
771  ctx->link = decklink_link_conf_map[cctx->link];
772  cctx->ctx = ctx;
773 #if CONFIG_LIBKLVANC
774  if (klvanc_context_create(&ctx->vanc_ctx) < 0) {
775  av_log(avctx, AV_LOG_ERROR, "Cannot create VANC library context\n");
776  return AVERROR(ENOMEM);
777  }
778  ctx->supports_vanc = 1;
779 #endif
780 
781  /* List available devices and exit. */
782  if (ctx->list_devices) {
783  ff_decklink_list_devices_legacy(avctx, 0, 1);
784  return AVERROR_EXIT;
785  }
786 
787  ret = ff_decklink_init_device(avctx, avctx->url);
788  if (ret < 0)
789  return ret;
790 
791  /* Get output device. */
792  if (ctx->dl->QueryInterface(IID_IDeckLinkOutput, (void **) &ctx->dlo) != S_OK) {
793  av_log(avctx, AV_LOG_ERROR, "Could not open output device from '%s'\n",
794  avctx->url);
795  ret = AVERROR(EIO);
796  goto error;
797  }
798 
799  /* List supported formats. */
800  if (ctx->list_formats) {
801  ff_decklink_list_formats(avctx, DIRECTION_OUT);
802  ret = AVERROR_EXIT;
803  goto error;
804  }
805 
806  /* Setup streams. */
807  ret = AVERROR(EIO);
808  for (n = 0; n < avctx->nb_streams; n++) {
809  AVStream *st = avctx->streams[n];
810  AVCodecParameters *c = st->codecpar;
811  if (c->codec_type == AVMEDIA_TYPE_AUDIO) {
812  if (decklink_setup_audio(avctx, st))
813  goto error;
814  } else if (c->codec_type == AVMEDIA_TYPE_VIDEO) {
815  if (decklink_setup_video(avctx, st))
816  goto error;
817  } else if (c->codec_type == AVMEDIA_TYPE_SUBTITLE) {
818  if (decklink_setup_subtitle(avctx, st))
819  goto error;
820  } else {
821  av_log(avctx, AV_LOG_ERROR, "Unsupported stream type.\n");
822  goto error;
823  }
824  }
825 
826  for (n = 0; n < avctx->nb_streams; n++) {
827  AVStream *st = avctx->streams[n];
828  AVCodecParameters *c = st->codecpar;
829 
830  if(c->codec_type == AVMEDIA_TYPE_SUBTITLE)
831  avpriv_set_pts_info(st, 64, ctx->bmd_tb_num, ctx->bmd_tb_den);
832  }
833 
834  ret = ff_ccfifo_init(&ctx->cc_fifo, av_make_q(ctx->bmd_tb_den, ctx->bmd_tb_num), avctx);
835  if (ret < 0) {
836  av_log(ctx, AV_LOG_ERROR, "Failure to setup CC FIFO queue\n");
837  goto error;
838  }
839 
840  return 0;
841 
842 error:
843  ff_decklink_cleanup(avctx);
844  return ret;
845 }
846 
847 int ff_decklink_write_packet(AVFormatContext *avctx, AVPacket *pkt)
848 {
849  AVStream *st = avctx->streams[pkt->stream_index];
850 
851  if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
852  return decklink_write_video_packet(avctx, pkt);
853  else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
854  return decklink_write_audio_packet(avctx, pkt);
855  else if (st->codecpar->codec_type == AVMEDIA_TYPE_SUBTITLE)
856  return decklink_write_subtitle_packet(avctx, pkt);
857 
858  return AVERROR(EIO);
859 }
860 
861 int ff_decklink_list_output_devices(AVFormatContext *avctx, struct AVDeviceInfoList *device_list)
862 {
863  return ff_decklink_list_devices(avctx, device_list, 0, 1);
864 }
865 
866 } /* extern "C" */
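/* Usage sketch (illustrative; the device name is an example, use this muxer's
 * -list_devices 1 option to discover the real name on a given system):
 *
 *   ffmpeg -i input.avi -f decklink -pix_fmt uyvy422 'DeckLink Mini Monitor'
 *
 * Video must be fed as uyvy422 (or pre-encoded V210) and PCM audio as 16-bit/48 kHz,
 * matching the checks in decklink_setup_video() and decklink_setup_audio() above. */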