FFmpeg
decklink_enc.cpp
1 /*
2  * Blackmagic DeckLink output
3  * Copyright (c) 2013-2014 Ramiro Polla
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 #include <atomic>
23 using std::atomic;
24 
25 /* Include internal.h first to avoid conflict between winsock.h (used by
26  * DeckLink headers) and winsock2.h (used by libavformat) in MSVC++ builds */
27 extern "C" {
28 #include "libavformat/internal.h"
29 }
30 
31 #include <DeckLinkAPIVersion.h>
32 #include <DeckLinkAPI.h>
33 #if BLACKMAGIC_DECKLINK_API_VERSION >= 0x0e030000
34 #include <DeckLinkAPI_v14_2_1.h>
35 #endif
36 
37 extern "C" {
38 #include "libavformat/avformat.h"
39 #include "libavcodec/bytestream.h"
40 #include "libavutil/frame.h"
41 #include "libavutil/internal.h"
42 #include "libavutil/imgutils.h"
43 #include "avdevice.h"
44 }
45 
46 #include "decklink_common.h"
47 #include "decklink_enc.h"
48 #if CONFIG_LIBKLVANC
49 #include "libklvanc/vanc.h"
50 #include "libklvanc/vanc-lines.h"
51 #include "libklvanc/pixels.h"
52 #endif
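/* decklink_frame wraps either a cloned AVFrame (wrapped_avframe input in UYVY422) or a
 * cloned V210 AVPacket as an IDeckLinkVideoFrame, using manual COM-style reference
 * counting; the wrapped frame or packet is released once the last reference is dropped. */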
53 
54 /* DeckLink callback class declaration */
55 class decklink_frame : public IDeckLinkVideoFrame_v14_2_1
56 {
57 public:
58  decklink_frame(struct decklink_ctx *ctx, AVFrame *avframe, AVCodecID codec_id, int height, int width) :
59  _ctx(ctx), _avframe(avframe), _avpacket(NULL), _codec_id(codec_id), _ancillary(NULL), _height(height), _width(width), _refs(1) { }
60  decklink_frame(struct decklink_ctx *ctx, AVPacket *avpacket, AVCodecID codec_id, int height, int width) :
61  _ctx(ctx), _avframe(NULL), _avpacket(avpacket), _codec_id(codec_id), _ancillary(NULL), _height(height), _width(width), _refs(1) { }
62  virtual long STDMETHODCALLTYPE GetWidth (void) { return _width; }
63  virtual long STDMETHODCALLTYPE GetHeight (void) { return _height; }
64  virtual long STDMETHODCALLTYPE GetRowBytes (void)
65  {
66  if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
67  return _avframe->linesize[0] < 0 ? -_avframe->linesize[0] : _avframe->linesize[0];
68  else
69  return ((GetWidth() + 47) / 48) * 128;
70  }
71  virtual BMDPixelFormat STDMETHODCALLTYPE GetPixelFormat(void)
72  {
73  if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
74  return bmdFormat8BitYUV;
75  else
76  return bmdFormat10BitYUV;
77  }
78  virtual BMDFrameFlags STDMETHODCALLTYPE GetFlags (void)
79  {
80  if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
81  return _avframe->linesize[0] < 0 ? bmdFrameFlagFlipVertical : bmdFrameFlagDefault;
82  else
83  return bmdFrameFlagDefault;
84  }
85 
86  virtual HRESULT STDMETHODCALLTYPE GetBytes (void **buffer)
87  {
88  if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
89  if (_avframe->linesize[0] < 0)
90  *buffer = (void *)(_avframe->data[0] + _avframe->linesize[0] * (_avframe->height - 1));
91  else
92  *buffer = (void *)(_avframe->data[0]);
93  } else {
94  *buffer = (void *)(_avpacket->data);
95  }
96  return S_OK;
97  }
98 
99  virtual HRESULT STDMETHODCALLTYPE GetTimecode (BMDTimecodeFormat format, IDeckLinkTimecode **timecode) { return S_FALSE; }
100  virtual HRESULT STDMETHODCALLTYPE GetAncillaryData(IDeckLinkVideoFrameAncillary **ancillary)
101  {
102  *ancillary = _ancillary;
103  if (_ancillary) {
104  _ancillary->AddRef();
105  return S_OK;
106  } else {
107  return S_FALSE;
108  }
109  }
110  virtual HRESULT STDMETHODCALLTYPE SetAncillaryData(IDeckLinkVideoFrameAncillary *ancillary)
111  {
112  if (_ancillary)
113  _ancillary->Release();
114  _ancillary = ancillary;
115  _ancillary->AddRef();
116  return S_OK;
117  }
118  virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, LPVOID *ppv)
119  {
120  if (DECKLINK_IsEqualIID(riid, IID_IUnknown)) {
121  *ppv = static_cast<IUnknown*>(this);
122  } else if (DECKLINK_IsEqualIID(riid, IID_IDeckLinkVideoFrame_v14_2_1)) {
123  *ppv = static_cast<IDeckLinkVideoFrame_v14_2_1*>(this);
124  } else {
125  *ppv = NULL;
126  return E_NOINTERFACE;
127  }
128 
129  AddRef();
130  return S_OK;
131  }
132  virtual ULONG STDMETHODCALLTYPE AddRef(void) { return ++_refs; }
133  virtual ULONG STDMETHODCALLTYPE Release(void)
134  {
135  int ret = --_refs;
136  if (!ret) {
137  av_frame_free(&_avframe);
138  av_packet_free(&_avpacket);
139  if (_ancillary)
140  _ancillary->Release();
141  delete this;
142  }
143  return ret;
144  }
145 
146  struct decklink_ctx *_ctx;
147  AVFrame *_avframe;
148  AVPacket *_avpacket;
149  AVCodecID _codec_id;
150  IDeckLinkVideoFrameAncillary *_ancillary;
151  int _height;
152  int _width;
153 
154 private:
155  std::atomic<int> _refs;
156 };
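/* The completion callback runs on a DeckLink driver thread: it drops the data held by
 * the wrapped AVFrame/AVPacket and returns one slot to the bounded frame buffer,
 * waking any writer blocked in decklink_write_video_packet(). */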
157 
158 class decklink_output_callback : public IDeckLinkVideoOutputCallback_v14_2_1
159 {
160 public:
161  virtual HRESULT STDMETHODCALLTYPE ScheduledFrameCompleted(IDeckLinkVideoFrame_v14_2_1 *_frame, BMDOutputFrameCompletionResult result)
162  {
163  decklink_frame *frame = static_cast<decklink_frame *>(_frame);
164  struct decklink_ctx *ctx = frame->_ctx;
165 
166  if (frame->_avframe)
167  av_frame_unref(frame->_avframe);
168  if (frame->_avpacket)
169  av_packet_unref(frame->_avpacket);
170 
171  pthread_mutex_lock(&ctx->mutex);
172  ctx->frames_buffer_available_spots++;
173  pthread_cond_broadcast(&ctx->cond);
174  pthread_mutex_unlock(&ctx->mutex);
175 
176  return S_OK;
177  }
178  virtual HRESULT STDMETHODCALLTYPE ScheduledPlaybackHasStopped(void) { return S_OK; }
179  virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, LPVOID *ppv)
180  {
181  if (DECKLINK_IsEqualIID(riid, IID_IUnknown)) {
182  *ppv = static_cast<IUnknown*>(this);
183  } else if (DECKLINK_IsEqualIID(riid, IID_IDeckLinkVideoOutputCallback_v14_2_1)) {
184  *ppv = static_cast<IDeckLinkVideoOutputCallback_v14_2_1*>(this);
185  } else {
186  *ppv = NULL;
187  return E_NOINTERFACE;
188  }
189 
190  AddRef();
191  return S_OK;
192  }
193  virtual ULONG STDMETHODCALLTYPE AddRef(void) { return 1; }
194  virtual ULONG STDMETHODCALLTYPE Release(void) { return 1; }
195 };
196 
197 static int decklink_setup_video(AVFormatContext *avctx, AVStream *st)
198 {
199  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
200  struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
201  AVCodecParameters *c = st->codecpar;
202 
203  if (ctx->video) {
204  av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n");
205  return -1;
206  }
207 
208  if (c->codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
209  if (c->format != AV_PIX_FMT_UYVY422) {
210  av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format!"
211  " Only AV_PIX_FMT_UYVY422 is supported.\n");
212  return -1;
213  }
214  ctx->raw_format = bmdFormat8BitYUV;
215  } else if (c->codec_id != AV_CODEC_ID_V210) {
216  av_log(avctx, AV_LOG_ERROR, "Unsupported codec type!"
217  " Only V210 and wrapped frame with AV_PIX_FMT_UYVY422 are supported.\n");
218  return -1;
219  } else {
220  ctx->raw_format = bmdFormat10BitYUV;
221  }
222 
223  if (ff_decklink_set_configs(avctx, DIRECTION_OUT) < 0) {
224  av_log(avctx, AV_LOG_ERROR, "Could not set output configuration\n");
225  return -1;
226  }
227  if (ff_decklink_set_format(avctx, c->width, c->height,
228  st->time_base.num, st->time_base.den, c->field_order)) {
229  av_log(avctx, AV_LOG_ERROR, "Unsupported video size, framerate or field order!"
230  " Check available formats with -list_formats 1.\n");
231  return -1;
232  }
233  if (ctx->supports_vanc && ctx->dlo->EnableVideoOutput(ctx->bmd_mode, bmdVideoOutputVANC) != S_OK) {
234  av_log(avctx, AV_LOG_WARNING, "Could not enable video output with VANC! Trying without...\n");
235  ctx->supports_vanc = 0;
236  }
237  if (!ctx->supports_vanc && ctx->dlo->EnableVideoOutput(ctx->bmd_mode, bmdVideoOutputFlagDefault) != S_OK) {
238  av_log(avctx, AV_LOG_ERROR, "Could not enable video output!\n");
239  return -1;
240  }
241 
242  /* Set callback. */
243  ctx->output_callback = new decklink_output_callback();
244  ctx->dlo->SetScheduledFrameCompletionCallback(ctx->output_callback);
245 
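 /* Convert the configured preroll duration (in seconds) into a frame count using the
  * stream time base; rational frame rates such as 30000/1001 have den > 1000 and are
  * scaled back down accordingly. */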
246  ctx->frames_preroll = st->time_base.den * ctx->preroll;
247  if (st->time_base.den > 1000)
248  ctx->frames_preroll /= 1000;
249 
250  /* Buffer twice as many frames as the preroll. */
251  ctx->frames_buffer = ctx->frames_preroll * 2;
252  ctx->frames_buffer = FFMIN(ctx->frames_buffer, 60);
253  pthread_mutex_init(&ctx->mutex, NULL);
254  pthread_cond_init(&ctx->cond, NULL);
255  ctx->frames_buffer_available_spots = ctx->frames_buffer;
256 
257  av_log(avctx, AV_LOG_DEBUG, "output: %s, preroll: %d, frames buffer size: %d\n",
258  avctx->url, ctx->frames_preroll, ctx->frames_buffer);
259 
260  /* The device expects the framerate to be fixed. */
261  avpriv_set_pts_info(st, 64, st->time_base.num, st->time_base.den);
262 
263  ctx->video = 1;
264 
265  return 0;
266 }
267 
268 static int decklink_setup_audio(AVFormatContext *avctx, AVStream *st)
269 {
270  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
271  struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
272  AVCodecParameters *c = st->codecpar;
273 
274  if (ctx->audio) {
275  av_log(avctx, AV_LOG_ERROR, "Only one audio stream is supported!\n");
276  return -1;
277  }
278 
279  if (c->codec_id == AV_CODEC_ID_AC3) {
280  /* Regardless of the number of channels in the codec, we're only
281  using 2 SDI audio channels at 48000Hz */
282  ctx->channels = 2;
283  } else if (c->codec_id == AV_CODEC_ID_PCM_S16LE) {
284  if (c->sample_rate != 48000) {
285  av_log(avctx, AV_LOG_ERROR, "Unsupported sample rate!"
286  " Only 48kHz is supported.\n");
287  return -1;
288  }
289  if (c->ch_layout.nb_channels != 2 && c->ch_layout.nb_channels != 8 && c->ch_layout.nb_channels != 16) {
290  av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels!"
291  " Only 2, 8 or 16 channels are supported.\n");
292  return -1;
293  }
294  ctx->channels = c->ch_layout.nb_channels;
295  } else {
296  av_log(avctx, AV_LOG_ERROR, "Unsupported codec specified!"
297  " Only PCM_S16LE and AC-3 are supported.\n");
298  return -1;
299  }
300 
301  if (ctx->dlo->EnableAudioOutput(bmdAudioSampleRate48kHz,
302  bmdAudioSampleType16bitInteger,
303  ctx->channels,
304  bmdAudioOutputStreamTimestamped) != S_OK) {
305  av_log(avctx, AV_LOG_ERROR, "Could not enable audio output!\n");
306  return -1;
307  }
308  if (ctx->dlo->BeginAudioPreroll() != S_OK) {
309  av_log(avctx, AV_LOG_ERROR, "Could not begin audio preroll!\n");
310  return -1;
311  }
312 
313  /* The device expects the sample rate to be fixed. */
314  avpriv_set_pts_info(st, 64, 1, 48000);
315 
316  ctx->audio = 1;
317 
318  return 0;
319 }
320 
321 /* Wrap the AC-3 packet into an SMPTE ST 337 payload in S16LE format that can be easily
322  injected into the PCM stream. Note: despite the function name, only AC-3 is implemented */
323 static int create_s337_payload(AVPacket *pkt, uint8_t **outbuf, int *outsize)
324 {
325  /* Note: if the packet size is not divisible by four, we need to make the actual
326  payload larger to ensure it ends on a two-channel S16LE boundary */
327  int payload_size = FFALIGN(pkt->size, 4) + 8;
328  uint16_t bitcount = pkt->size * 8;
329  uint8_t *s337_payload;
330  PutByteContext pb;
331 
332  /* Sanity check: According to SMPTE ST 340:2015 Sec 4.1, the AC-3 sync frame will
333  exactly match the 1536 samples of baseband (PCM) audio that it represents. */
334  if (pkt->size > 1536)
335  return AVERROR(EINVAL);
336 
337  /* Encapsulate AC3 syncframe into SMPTE 337 packet */
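 /* SMPTE ST 337 data-burst layout as written below: Pa/Pb sync words (0xF872, 0x4E1F),
  * a Pc burst-info word with data_type 1 (AC-3), a Pd length code in bits, then the
  * AC-3 syncframe byte-swapped into little-endian 16-bit words and zero-padded to a
  * four-byte (stereo S16) boundary. */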
338  s337_payload = (uint8_t *) av_malloc(payload_size);
339  if (s337_payload == NULL)
340  return AVERROR(ENOMEM);
341  bytestream2_init_writer(&pb, s337_payload, payload_size);
342  bytestream2_put_le16u(&pb, 0xf872); /* Sync word 1 */
343  bytestream2_put_le16u(&pb, 0x4e1f); /* Sync word 2 */
344  bytestream2_put_le16u(&pb, 0x0001); /* Burst Info, including data type (1=ac3) */
345  bytestream2_put_le16u(&pb, bitcount); /* Length code */
346  for (int i = 0; i < (pkt->size - 1); i += 2)
347  bytestream2_put_le16u(&pb, (pkt->data[i] << 8) | pkt->data[i+1]);
348 
349  /* Ensure final payload is aligned on 4-byte boundary */
350  if (pkt->size & 1)
351  bytestream2_put_le16u(&pb, pkt->data[pkt->size - 1] << 8);
352  if ((pkt->size & 3) == 1 || (pkt->size & 3) == 2)
353  bytestream2_put_le16u(&pb, 0);
354 
355  *outsize = payload_size;
356  *outbuf = s337_payload;
357  return 0;
358 }
359 
360 static int decklink_setup_subtitle(AVFormatContext *avctx, AVStream *st)
361 {
362  int ret = -1;
363 
364  switch(st->codecpar->codec_id) {
365 #if CONFIG_LIBKLVANC
366  case AV_CODEC_ID_EIA_608:
367  /* No special setup required */
368  ret = 0;
369  break;
370 #endif
371  default:
372  av_log(avctx, AV_LOG_ERROR, "Unsupported subtitle codec specified\n");
373  break;
374  }
375 
376  return ret;
377 }
378 
379 static int decklink_setup_data(AVFormatContext *avctx, AVStream *st)
380 {
381  int ret = -1;
382 
383  switch(st->codecpar->codec_id) {
384 #if CONFIG_LIBKLVANC
385  case AV_CODEC_ID_SMPTE_2038:
386  /* No specific setup required */
387  ret = 0;
388  break;
389 #endif
390  default:
391  av_log(avctx, AV_LOG_ERROR, "Unsupported data codec specified\n");
392  break;
393  }
394 
395  return ret;
396 }
397 
398 av_cold int ff_decklink_write_trailer(AVFormatContext *avctx)
399 {
400  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
401  struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
402 
403  if (ctx->playback_started) {
404  BMDTimeValue actual;
405  ctx->dlo->StopScheduledPlayback(ctx->last_pts * ctx->bmd_tb_num,
406  &actual, ctx->bmd_tb_den);
407  ctx->dlo->DisableVideoOutput();
408  if (ctx->audio)
409  ctx->dlo->DisableAudioOutput();
410  }
411 
412  ff_decklink_cleanup(avctx);
413 
414  if (ctx->output_callback)
415  delete ctx->output_callback;
416 
417  pthread_mutex_destroy(&ctx->mutex);
418  pthread_cond_destroy(&ctx->cond);
419 
420 #if CONFIG_LIBKLVANC
421  klvanc_context_destroy(ctx->vanc_ctx);
422 #endif
423  ff_decklink_packet_queue_end(&ctx->vanc_queue);
424 
425  ff_ccfifo_uninit(&ctx->cc_fifo);
426  av_freep(&cctx->ctx);
427 
428  return 0;
429 }
430 
431 #if CONFIG_LIBKLVANC
432 static void construct_cc(AVFormatContext *avctx, struct decklink_ctx *ctx,
433  AVPacket *pkt, struct klvanc_line_set_s *vanc_lines)
434 {
435  struct klvanc_packet_eia_708b_s *cdp;
436  uint16_t *cdp_words;
437  uint16_t len;
438  uint8_t cc_count;
439  size_t size;
440  int ret, i;
441 
442  const uint8_t *data = av_packet_get_side_data(pkt, AV_PKT_DATA_A53_CC, &size);
443  if (!data)
444  return;
445 
446  cc_count = size / 3;
447 
448  ret = klvanc_create_eia708_cdp(&cdp);
449  if (ret)
450  return;
451 
452  ret = klvanc_set_framerate_EIA_708B(cdp, ctx->bmd_tb_num, ctx->bmd_tb_den);
453  if (ret) {
454  av_log(avctx, AV_LOG_ERROR, "Invalid framerate specified: %" PRId64 "/%" PRId64 "\n",
455  ctx->bmd_tb_num, ctx->bmd_tb_den);
456  klvanc_destroy_eia708_cdp(cdp);
457  return;
458  }
459 
460  if (cc_count > KLVANC_MAX_CC_COUNT) {
461  av_log(avctx, AV_LOG_ERROR, "Illegal cc_count received: %d\n", cc_count);
462  cc_count = KLVANC_MAX_CC_COUNT;
463  }
464 
465  /* CC data */
466  cdp->header.ccdata_present = 1;
467  cdp->header.caption_service_active = 1;
468  cdp->ccdata.cc_count = cc_count;
469  for (i = 0; i < cc_count; i++) {
470  if (data [3*i] & 0x04)
471  cdp->ccdata.cc[i].cc_valid = 1;
472  cdp->ccdata.cc[i].cc_type = data[3*i] & 0x03;
473  cdp->ccdata.cc[i].cc_data[0] = data[3*i+1];
474  cdp->ccdata.cc[i].cc_data[1] = data[3*i+2];
475  }
476 
477  klvanc_finalize_EIA_708B(cdp, ctx->cdp_sequence_num++);
478  ret = klvanc_convert_EIA_708B_to_words(cdp, &cdp_words, &len);
479  klvanc_destroy_eia708_cdp(cdp);
480  if (ret != 0) {
481  av_log(avctx, AV_LOG_ERROR, "Failed converting 708 packet to words\n");
482  return;
483  }
484 
485  ret = klvanc_line_insert(ctx->vanc_ctx, vanc_lines, cdp_words, len, 11, 0);
486  free(cdp_words);
487  if (ret != 0) {
488  av_log(avctx, AV_LOG_ERROR, "VANC line insertion failed\n");
489  return;
490  }
491 }
492 
493 /* See SMPTE ST 2016-3:2009 */
494 static void construct_afd(AVFormatContext *avctx, struct decklink_ctx *ctx,
495  AVPacket *pkt, struct klvanc_line_set_s *vanc_lines,
496  AVStream *st)
497 {
498  struct klvanc_packet_afd_s *afd = NULL;
499  uint16_t *afd_words = NULL;
500  uint16_t len;
501  size_t size;
502  int f1_line = 12, f2_line = 0, ret;
503 
504  const uint8_t *data = av_packet_get_side_data(pkt, AV_PKT_DATA_AFD, &size);
505  if (!data || size == 0)
506  return;
507 
508  ret = klvanc_create_AFD(&afd);
509  if (ret)
510  return;
511 
512  ret = klvanc_set_AFD_val(afd, data[0]);
513  if (ret) {
514  av_log(avctx, AV_LOG_ERROR, "Invalid AFD value specified: %d\n",
515  data[0]);
516  klvanc_destroy_AFD(afd);
517  return;
518  }
519 
520  /* Compute the AR flag based on the DAR (see ST 2016-1:2009 Sec 9.1). Note, we treat
521  anything below 1.4 as 4:3 (as opposed to the standard 1.33), because there are lots
522  of streams in the field that aren't *exactly* 4:3 but a tiny bit larger after doing
523  the math... */
524  if (av_cmp_q((AVRational) {st->codecpar->width * st->codecpar->sample_aspect_ratio.num,
525  st->codecpar->height * st->codecpar->sample_aspect_ratio.den}, (AVRational) {14, 10}) == 1)
526  afd->aspectRatio = ASPECT_16x9;
527  else
528  afd->aspectRatio = ASPECT_4x3;
529 
530  ret = klvanc_convert_AFD_to_words(afd, &afd_words, &len);
531  if (ret) {
532  av_log(avctx, AV_LOG_ERROR, "Failed converting AFD packet to words\n");
533  goto out;
534  }
535 
536  ret = klvanc_line_insert(ctx->vanc_ctx, vanc_lines, afd_words, len, f1_line, 0);
537  if (ret) {
538  av_log(avctx, AV_LOG_ERROR, "VANC line insertion failed\n");
539  goto out;
540  }
541 
542  /* For interlaced video, insert into both fields. Switching lines for field 2
543  derived from SMPTE RP 168:2009, Sec 6, Table 2. */
544  switch (ctx->bmd_mode) {
545  case bmdModeNTSC:
546  case bmdModeNTSC2398:
547  f2_line = 273 - 10 + f1_line;
548  break;
549  case bmdModePAL:
550  f2_line = 319 - 6 + f1_line;
551  break;
552  case bmdModeHD1080i50:
553  case bmdModeHD1080i5994:
554  case bmdModeHD1080i6000:
555  f2_line = 569 - 7 + f1_line;
556  break;
557  default:
558  f2_line = 0;
559  break;
560  }
561 
562  if (f2_line > 0) {
563  ret = klvanc_line_insert(ctx->vanc_ctx, vanc_lines, afd_words, len, f2_line, 0);
564  if (ret) {
565  av_log(avctx, AV_LOG_ERROR, "VANC line insertion failed\n");
566  goto out;
567  }
568  }
569 
570 out:
571  if (afd)
572  klvanc_destroy_AFD(afd);
573  if (afd_words)
574  free(afd_words);
575 }
576 
577 /* Parse any EIA-608 subtitles sitting on the queue, and write packet side data
578  that will later be handled by construct_cc... */
579 static void parse_608subs(AVFormatContext *avctx, struct decklink_ctx *ctx, AVPacket *pkt)
580 {
581  size_t cc_size = ff_ccfifo_getoutputsize(&ctx->cc_fifo);
582  uint8_t *cc_data;
583 
584  if (!ff_ccfifo_ccdetected(&ctx->cc_fifo))
585  return;
586 
587  cc_data = av_packet_new_side_data(pkt, AV_PKT_DATA_A53_CC, cc_size);
588  if (cc_data)
589  ff_ccfifo_injectbytes(&ctx->cc_fifo, cc_data, cc_size);
590 }
591 
592 static int decklink_construct_vanc(AVFormatContext *avctx, struct decklink_ctx *ctx,
593  AVPacket *pkt, decklink_frame *frame,
594  AVStream *st)
595 {
596  struct klvanc_line_set_s vanc_lines = { 0 };
597  int ret = 0, i;
598 
599  if (!ctx->supports_vanc)
600  return 0;
601 
602  parse_608subs(avctx, ctx, pkt);
603  construct_cc(avctx, ctx, pkt, &vanc_lines);
604  construct_afd(avctx, ctx, pkt, &vanc_lines, st);
605 
606  /* See if there are any pending data packets to process */
607  while (ff_decklink_packet_queue_size(&ctx->vanc_queue) > 0) {
608  AVStream *vanc_st;
609  AVPacket vanc_pkt;
610  int64_t pts;
611 
612  pts = ff_decklink_packet_queue_peekpts(&ctx->vanc_queue);
613  if (pts > ctx->last_pts) {
614  /* We haven't gotten to the video frame we are supposed to inject
615  the oldest VANC packet into yet, so leave it on the queue... */
616  break;
617  }
618 
619  ret = ff_decklink_packet_queue_get(&ctx->vanc_queue, &vanc_pkt, 1);
620  if (vanc_pkt.pts + 1 < ctx->last_pts) {
621  av_log(avctx, AV_LOG_WARNING, "VANC packet too old, throwing away\n");
622  av_packet_unref(&vanc_pkt);
623  continue;
624  }
625 
626  vanc_st = avctx->streams[vanc_pkt.stream_index];
627  if (vanc_st->codecpar->codec_id == AV_CODEC_ID_SMPTE_2038) {
628  struct klvanc_smpte2038_anc_data_packet_s *pkt_2038 = NULL;
629 
630  klvanc_smpte2038_parse_pes_payload(vanc_pkt.data, vanc_pkt.size, &pkt_2038);
631  if (pkt_2038 == NULL) {
632  av_log(avctx, AV_LOG_ERROR, "failed to decode SMPTE 2038 PES packet");
633  av_packet_unref(&vanc_pkt);
634  continue;
635  }
636  for (int i = 0; i < pkt_2038->lineCount; i++) {
637  struct klvanc_smpte2038_anc_data_line_s *l = &pkt_2038->lines[i];
638  uint16_t *vancWords = NULL;
639  uint16_t vancWordCount;
640 
641  if (klvanc_smpte2038_convert_line_to_words(l, &vancWords,
642  &vancWordCount) < 0)
643  break;
644 
645  ret = klvanc_line_insert(ctx->vanc_ctx, &vanc_lines, vancWords,
646  vancWordCount, l->line_number, 0);
647  free(vancWords);
648  if (ret != 0) {
649  av_log(avctx, AV_LOG_ERROR, "VANC line insertion failed\n");
650  break;
651  }
652  }
653  klvanc_smpte2038_anc_data_packet_free(pkt_2038);
654  }
655  av_packet_unref(&vanc_pkt);
656  }
657 
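 /* Render the collected VANC lines into a 10-bit (V210) ancillary buffer allocated by
  * the driver and attach it to the outgoing frame. */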
658  IDeckLinkVideoFrameAncillary *vanc;
659  int result = ctx->dlo->CreateAncillaryData(bmdFormat10BitYUV, &vanc);
660  if (result != S_OK) {
661  av_log(avctx, AV_LOG_ERROR, "Failed to create vanc\n");
662  ret = AVERROR(EIO);
663  goto done;
664  }
665 
666  /* Now that we've got all the VANC lines in a nice orderly manner, generate the
667  final VANC sections for the Decklink output */
668  for (i = 0; i < vanc_lines.num_lines; i++) {
669  struct klvanc_line_s *line = vanc_lines.lines[i];
670  int real_line;
671  void *buf;
672 
673  if (!line)
674  break;
675 
676  /* FIXME: include hack for certain Decklink cards which mis-represent
677  line numbers for pSF frames */
678  real_line = line->line_number;
679 
680  result = vanc->GetBufferForVerticalBlankingLine(real_line, &buf);
681  if (result != S_OK) {
682  av_log(avctx, AV_LOG_ERROR, "Failed to get VANC line %d: %d", real_line, result);
683  continue;
684  }
685 
686  /* Generate the full line taking into account all VANC packets on that line */
687  result = klvanc_generate_vanc_line_v210(ctx->vanc_ctx, line, (uint8_t *) buf,
688  ctx->bmd_width);
689  if (result) {
690  av_log(avctx, AV_LOG_ERROR, "Failed to generate VANC line\n");
691  continue;
692  }
693  }
694 
695  result = frame->SetAncillaryData(vanc);
696  vanc->Release();
697  if (result != S_OK) {
698  av_log(avctx, AV_LOG_ERROR, "Failed to set vanc: %d", result);
699  ret = AVERROR(EIO);
700  }
701 
702 done:
703  for (i = 0; i < vanc_lines.num_lines; i++)
704  klvanc_line_free(vanc_lines.lines[i]);
705 
706  return ret;
707 }
708 #endif
709 
710 static int decklink_write_video_packet(AVFormatContext *avctx, AVPacket *pkt)
711 {
712  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
713  struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
714  AVStream *st = avctx->streams[pkt->stream_index];
715  AVFrame *avframe = NULL, *tmp = (AVFrame *)pkt->data;
716  AVPacket *avpacket = NULL;
717  decklink_frame *frame;
718  uint32_t buffered;
719  HRESULT hr;
720 
721  ctx->last_pts = FFMAX(ctx->last_pts, pkt->pts);
722 
723  if (st->codecpar->codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
724  if (tmp->format != AV_PIX_FMT_UYVY422 ||
725  tmp->width != ctx->bmd_width ||
726  tmp->height != ctx->bmd_height) {
727  av_log(avctx, AV_LOG_ERROR, "Got a frame with invalid pixel format or dimension.\n");
728  return AVERROR(EINVAL);
729  }
730 
731  avframe = av_frame_clone(tmp);
732  if (!avframe) {
733  av_log(avctx, AV_LOG_ERROR, "Could not clone video frame.\n");
734  return AVERROR(EIO);
735  }
736 
737  frame = new decklink_frame(ctx, avframe, st->codecpar->codec_id, avframe->height, avframe->width);
738  } else {
739  avpacket = av_packet_clone(pkt);
740  if (!avpacket) {
741  av_log(avctx, AV_LOG_ERROR, "Could not clone video packet.\n");
742  return AVERROR(EIO);
743  }
744 
745  frame = new decklink_frame(ctx, avpacket, st->codecpar->codec_id, ctx->bmd_height, ctx->bmd_width);
746 
747 #if CONFIG_LIBKLVANC
748  if (decklink_construct_vanc(avctx, ctx, pkt, frame, st))
749  av_log(avctx, AV_LOG_ERROR, "Failed to construct VANC\n");
750 #endif
751  }
752 
753  if (!frame) {
754  av_log(avctx, AV_LOG_ERROR, "Could not create new frame.\n");
755  av_frame_free(&avframe);
756  av_packet_free(&avpacket);
757  return AVERROR(EIO);
758  }
759 
760  /* Always keep at most one second of frames buffered. */
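 /* A buffer slot is taken here and handed back in ScheduledFrameCompleted(), so at
  * most ctx->frames_buffer frames are ever in flight. */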
761  pthread_mutex_lock(&ctx->mutex);
762  while (ctx->frames_buffer_available_spots == 0) {
763  pthread_cond_wait(&ctx->cond, &ctx->mutex);
764  }
765  ctx->frames_buffer_available_spots--;
766  pthread_mutex_unlock(&ctx->mutex);
767 
768  if (ctx->first_pts == AV_NOPTS_VALUE)
769  ctx->first_pts = pkt->pts;
770 
771  /* Schedule frame for playback. */
772  hr = ctx->dlo->ScheduleVideoFrame(frame,
773  pkt->pts * ctx->bmd_tb_num,
774  ctx->bmd_tb_num, ctx->bmd_tb_den);
775  /* Pass ownership to DeckLink, or release on failure */
776  frame->Release();
777  if (hr != S_OK) {
778  av_log(avctx, AV_LOG_ERROR, "Could not schedule video frame."
779  " error %08x.\n", (uint32_t) hr);
780  return AVERROR(EIO);
781  }
782 
783  ctx->dlo->GetBufferedVideoFrameCount(&buffered);
784  av_log(avctx, AV_LOG_DEBUG, "Buffered video frames: %d.\n", (int) buffered);
785  if (pkt->pts > 2 && buffered <= 2)
786  av_log(avctx, AV_LOG_WARNING, "There are not enough buffered video frames."
787  " Video may misbehave!\n");
788 
789  /* Preroll video frames. */
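 /* Scheduled playback starts only once frames_preroll frames have been queued; audio
  * preroll (if enabled) is ended at the same point so both streams start together. */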
790  if (!ctx->playback_started && pkt->pts > (ctx->first_pts + ctx->frames_preroll)) {
791  av_log(avctx, AV_LOG_DEBUG, "Ending audio preroll.\n");
792  if (ctx->audio && ctx->dlo->EndAudioPreroll() != S_OK) {
793  av_log(avctx, AV_LOG_ERROR, "Could not end audio preroll!\n");
794  return AVERROR(EIO);
795  }
796  av_log(avctx, AV_LOG_DEBUG, "Starting scheduled playback.\n");
797  if (ctx->dlo->StartScheduledPlayback(ctx->first_pts * ctx->bmd_tb_num, ctx->bmd_tb_den, 1.0) != S_OK) {
798  av_log(avctx, AV_LOG_ERROR, "Could not start scheduled playback!\n");
799  return AVERROR(EIO);
800  }
801  ctx->playback_started = 1;
802  }
803 
804  return 0;
805 }
806 
807 static int decklink_write_audio_packet(AVFormatContext *avctx, AVPacket *pkt)
808 {
809  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
810  struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
811  AVStream *st = avctx->streams[pkt->stream_index];
812  int sample_count;
813  uint32_t buffered;
814  uint8_t *outbuf = NULL;
815  int ret = 0;
816 
817  ctx->dlo->GetBufferedAudioSampleFrameCount(&buffered);
818  if (pkt->pts > 1 && !buffered)
819  av_log(avctx, AV_LOG_WARNING, "There's no buffered audio."
820  " Audio will misbehave!\n");
821 
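 /* For AC-3 the compressed syncframe is wrapped in an SMPTE ST 337 burst carried on a
  * stereo pair of 16-bit samples, so the sample count is the payload size divided by 4. */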
822  if (st->codecpar->codec_id == AV_CODEC_ID_AC3) {
823  /* Encapsulate AC3 syncframe into SMPTE 337 packet */
824  int outbuf_size;
825  ret = create_s337_payload(pkt, &outbuf, &outbuf_size);
826  if (ret < 0)
827  return ret;
828  sample_count = outbuf_size / 4;
829  } else {
830  sample_count = pkt->size / (ctx->channels << 1);
831  outbuf = pkt->data;
832  }
833 
834  if (ctx->dlo->ScheduleAudioSamples(outbuf, sample_count, pkt->pts,
835  bmdAudioSampleRate48kHz, NULL) != S_OK) {
836  av_log(avctx, AV_LOG_ERROR, "Could not schedule audio samples.\n");
837  ret = AVERROR(EIO);
838  }
839 
840  if (st->codecpar->codec_id == AV_CODEC_ID_AC3)
841  av_freep(&outbuf);
842 
843  return ret;
844 }
845 
846 static int decklink_write_subtitle_packet(AVFormatContext *avctx, AVPacket *pkt)
847 {
848  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
849  struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
850 
851  ff_ccfifo_extractbytes(&ctx->cc_fifo, pkt->data, pkt->size);
852 
853  return 0;
854 }
855 
856 static int decklink_write_data_packet(AVFormatContext *avctx, AVPacket *pkt)
857 {
858  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
859  struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
860 
861  if (ff_decklink_packet_queue_put(&ctx->vanc_queue, pkt) < 0) {
862  av_log(avctx, AV_LOG_WARNING, "Failed to queue DATA packet\n");
863  }
864 
865  return 0;
866 }
867 
868 extern "C" {
869 
870 av_cold int ff_decklink_write_header(AVFormatContext *avctx)
871 {
872  struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
873  struct decklink_ctx *ctx;
874  unsigned int n;
875  int ret;
876 
877  ctx = (struct decklink_ctx *) av_mallocz(sizeof(struct decklink_ctx));
878  if (!ctx)
879  return AVERROR(ENOMEM);
880  ctx->list_devices = cctx->list_devices;
881  ctx->list_formats = cctx->list_formats;
882  ctx->preroll = cctx->preroll;
883  ctx->duplex_mode = cctx->duplex_mode;
884  ctx->first_pts = AV_NOPTS_VALUE;
885  if (cctx->link > 0 && (unsigned int)cctx->link < FF_ARRAY_ELEMS(decklink_link_conf_map))
886  ctx->link = decklink_link_conf_map[cctx->link];
887  cctx->ctx = ctx;
888 #if CONFIG_LIBKLVANC
889  if (klvanc_context_create(&ctx->vanc_ctx) < 0) {
890  av_log(avctx, AV_LOG_ERROR, "Cannot create VANC library context\n");
891  return AVERROR(ENOMEM);
892  }
893  ctx->supports_vanc = 1;
894 #endif
895 
896  /* List available devices and exit. */
897  if (ctx->list_devices) {
898  ff_decklink_list_devices_legacy(avctx, 0, 1);
899  return AVERROR_EXIT;
900  }
901 
902  ret = ff_decklink_init_device(avctx, avctx->url);
903  if (ret < 0)
904  return ret;
905 
906  /* Get output device. */
907  if (ctx->dl->QueryInterface(IID_IDeckLinkOutput_v14_2_1, (void **) &ctx->dlo) != S_OK) {
908  av_log(avctx, AV_LOG_ERROR, "Could not open output device from '%s'\n",
909  avctx->url);
910  ret = AVERROR(EIO);
911  goto error;
912  }
913 
914  /* List supported formats. */
915  if (ctx->list_formats) {
916  ff_decklink_list_formats(avctx);
917  ret = AVERROR_EXIT;
918  goto error;
919  }
920 
921  /* Setup streams. */
922  ret = AVERROR(EIO);
923  for (n = 0; n < avctx->nb_streams; n++) {
924  AVStream *st = avctx->streams[n];
925  AVCodecParameters *c = st->codecpar;
926  if (c->codec_type == AVMEDIA_TYPE_AUDIO) {
927  if (decklink_setup_audio(avctx, st))
928  goto error;
929  } else if (c->codec_type == AVMEDIA_TYPE_VIDEO) {
930  if (decklink_setup_video(avctx, st))
931  goto error;
932  } else if (c->codec_type == AVMEDIA_TYPE_DATA) {
933  if (decklink_setup_data(avctx, st))
934  goto error;
935  } else if (c->codec_type == AVMEDIA_TYPE_SUBTITLE) {
936  if (decklink_setup_subtitle(avctx, st))
937  goto error;
938  } else {
939  av_log(avctx, AV_LOG_ERROR, "Unsupported stream type.\n");
940  goto error;
941  }
942  }
943 
944  /* Reconfigure the data/subtitle stream clocks to match the video */
945  for (n = 0; n < avctx->nb_streams; n++) {
946  AVStream *st = avctx->streams[n];
947  AVCodecParameters *c = st->codecpar;
948 
949  if(c->codec_type == AVMEDIA_TYPE_DATA ||
950  c->codec_type == AVMEDIA_TYPE_SUBTITLE)
951  avpriv_set_pts_info(st, 64, ctx->bmd_tb_num, ctx->bmd_tb_den);
952  }
953  ff_decklink_packet_queue_init(avctx, &ctx->vanc_queue, cctx->vanc_queue_size);
954 
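 /* The CC FIFO is rate-matched to the output frame rate: decklink_write_subtitle_packet()
  * feeds it raw EIA-608 bytes and parse_608subs() later re-injects them as A53 side data
  * on each scheduled video frame. */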
955  ret = ff_ccfifo_init(&ctx->cc_fifo, av_make_q(ctx->bmd_tb_den, ctx->bmd_tb_num), avctx);
956  if (ret < 0) {
957  av_log(ctx, AV_LOG_ERROR, "Failure to setup CC FIFO queue\n");
958  goto error;
959  }
960 
961  return 0;
962 
963 error:
964  ff_decklink_cleanup(avctx);
965  return ret;
966 }
967 
968 int ff_decklink_write_packet(AVFormatContext *avctx, AVPacket *pkt)
969 {
970  AVStream *st = avctx->streams[pkt->stream_index];
971 
972  if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
973  return decklink_write_video_packet(avctx, pkt);
974  else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
975  return decklink_write_audio_packet(avctx, pkt);
976  else if (st->codecpar->codec_type == AVMEDIA_TYPE_DATA)
977  return decklink_write_data_packet(avctx, pkt);
978  else if (st->codecpar->codec_type == AVMEDIA_TYPE_SUBTITLE)
979  return decklink_write_subtitle_packet(avctx, pkt);
980 
981  return AVERROR(EIO);
982 }
983 
984 int ff_decklink_list_output_devices(AVFormatContext *avctx, struct AVDeviceInfoList *device_list)
985 {
986  return ff_decklink_list_devices(avctx, device_list, 0, 1);
987 }
988 
989 } /* extern "C" */