mpeg12dec.c
1 /*
2  * MPEG-1/2 decoder
3  * Copyright (c) 2000, 2001 Fabrice Bellard
4  * Copyright (c) 2002-2013 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /**
24  * @file
25  * MPEG-1/2 decoder
26  */
27 
28 #include "config_components.h"
29 
30 #define UNCHECKED_BITSTREAM_READER 1
31 #include <inttypes.h>
32 #include <stdatomic.h>
33 
34 #include "libavutil/attributes.h"
35 #include "libavutil/emms.h"
36 #include "libavutil/imgutils.h"
37 #include "libavutil/internal.h"
38 #include "libavutil/mem_internal.h"
39 #include "libavutil/reverse.h"
40 #include "libavutil/stereo3d.h"
41 #include "libavutil/timecode.h"
42 
43 #include "avcodec.h"
44 #include "codec_internal.h"
45 #include "decode.h"
46 #include "error_resilience.h"
47 #include "get_bits.h"
48 #include "hwaccel_internal.h"
49 #include "hwconfig.h"
50 #include "idctdsp.h"
51 #include "mpeg_er.h"
52 #include "mpeg12.h"
53 #include "mpeg12data.h"
54 #include "mpeg12dec.h"
55 #include "mpegutils.h"
56 #include "mpegvideo.h"
57 #include "mpegvideodata.h"
58 #include "mpegvideodec.h"
59 #include "profiles.h"
60 #include "startcode.h"
61 
62 #define A53_MAX_CC_COUNT 2000
63 
70 };
71 
72 typedef struct Mpeg12SliceContext {
75 
76  int last_dc[3]; ///< last DC values
77 
78  DECLARE_ALIGNED_32(int16_t, block)[12][64];
80 
81 typedef struct Mpeg1Context {
83  AVPanScan pan_scan; /* some temporary storage for the panscan */
88  uint8_t afd;
89  int has_afd;
93  AVRational frame_rate_ext; /* MPEG-2 specific frame rate modifier */
94  unsigned frame_rate_index;
95  int sync; /* Did we reach a sync point like a GOP/SEQ/KEYFrame? */
97  int tmpgexs;
102  int64_t timecode_frame_start; /*< GOP timecode frame start number, in non drop frame format */
103 } Mpeg1Context;
104 
105 /* as H.263, but only 17 codes */
106 static int mpeg_decode_motion(Mpeg12SliceContext *const s, int fcode, int pred)
107 {
108  int code, sign, val, shift;
109 
110  code = get_vlc2(&s->gb, ff_mv_vlc, MV_VLC_BITS, 2);
111  if (code == 0)
112  return pred;
113  if (code < 0)
114  return 0xffff;
115 
116  sign = get_bits1(&s->gb);
117  shift = fcode - 1;
118  val = code;
119  if (shift) {
120  val = (val - 1) << shift;
121  val |= get_bits(&s->gb, shift);
122  val++;
123  }
124  if (sign)
125  val = -val;
126  val += pred;
127 
128  /* modulo decoding */
129  return sign_extend(val, 5 + shift);
130 }
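/* Note on the reconstruction above: with shift = f_code - 1 the residual is
 * `shift` bits wide and the decoded delta spans [-(16 << shift), (16 << shift) - 1];
 * the final sign_extend(val, 5 + shift) performs the modulo wrap required for
 * motion vector decoding. Example: f_code = 1 (shift = 0), pred = 14,
 * delta = +5 gives 19, which sign_extend(19, 5) wraps to -13. */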
131 
132 #define MAX_INDEX (64 - 1)
133 #define check_scantable_index(ctx, x) \
134  do { \
135  if ((x) > MAX_INDEX) { \
136  av_log(ctx->c.avctx, AV_LOG_ERROR, "ac-tex damaged at %d %d\n", \
137  ctx->c.mb_x, ctx->c.mb_y); \
138  return AVERROR_INVALIDDATA; \
139  } \
140  } while (0)
141 
142 static int mpeg1_decode_block_inter(Mpeg12SliceContext *const s,
143  int16_t *block, int n)
144 {
145  int level, i, j, run;
146  const uint8_t *const scantable = s->c.intra_scantable.permutated;
147  const uint16_t *quant_matrix = s->c.inter_matrix;
148  const int qscale = s->c.qscale;
149 
150  {
151  OPEN_READER(re, &s->gb);
152  i = -1;
153  // special case for first coefficient, no need to add second VLC table
154  UPDATE_CACHE(re, &s->gb);
155  if (((int32_t) GET_CACHE(re, &s->gb)) < 0) {
156  level = (3 * qscale * quant_matrix[0]) >> 5;
157  level = (level - 1) | 1;
158  if (GET_CACHE(re, &s->gb) & 0x40000000)
159  level = -level;
160  block[0] = level;
161  i++;
162  SKIP_BITS(re, &s->gb, 2);
163  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
164  goto end;
165  }
166  /* now decode & dequantize the AC coefficients */
167  for (;;) {
168  GET_RL_VLC(level, run, re, &s->gb, ff_mpeg1_rl_vlc,
169  TEX_VLC_BITS, 2, 0);
170 
171  if (level != 0) {
172  i += run;
173  if (i > MAX_INDEX)
174  break;
175  j = scantable[i];
176  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
177  level = (level - 1) | 1;
178  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
179  SHOW_SBITS(re, &s->gb, 1);
180  SKIP_BITS(re, &s->gb, 1);
181  } else {
182  /* escape */
183  run = SHOW_UBITS(re, &s->gb, 6) + 1;
184  LAST_SKIP_BITS(re, &s->gb, 6);
185  UPDATE_CACHE(re, &s->gb);
186  level = SHOW_SBITS(re, &s->gb, 8);
187  SKIP_BITS(re, &s->gb, 8);
188  if (level == -128) {
189  level = SHOW_UBITS(re, &s->gb, 8) - 256;
190  SKIP_BITS(re, &s->gb, 8);
191  } else if (level == 0) {
192  level = SHOW_UBITS(re, &s->gb, 8);
193  SKIP_BITS(re, &s->gb, 8);
194  }
195  i += run;
196  if (i > MAX_INDEX)
197  break;
198  j = scantable[i];
199  if (level < 0) {
200  level = -level;
201  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
202  level = (level - 1) | 1;
203  level = -level;
204  } else {
205  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
206  level = (level - 1) | 1;
207  }
208  }
209 
210  block[j] = level;
211  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
212  break;
213  UPDATE_CACHE(re, &s->gb);
214  }
215 end:
216  LAST_SKIP_BITS(re, &s->gb, 2);
217  CLOSE_READER(re, &s->gb);
218  }
219 
221 
222  s->c.block_last_index[n] = i;
223  return 0;
224 }
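/* MPEG-1 non-intra dequantization as implemented above:
 *   level = ((2 * |level| + 1) * qscale * quant_matrix[j]) >> 5
 * followed by (level - 1) | 1, which forces the result to an odd value --
 * MPEG-1's form of mismatch control. The escape path reads a 6-bit run and an
 * 8-bit level, where -128 and 0 signal an extended negative/positive level in
 * the following byte. End of block is reached when the next two bits in the
 * bit cache are '10', which is what the 0xBFFFFFFF comparison tests. */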
225 
226 static int mpeg2_decode_block_non_intra(Mpeg12SliceContext *const s,
227  int16_t *block, int n)
228 {
229  int level, i, j, run;
230  const uint8_t *const scantable = s->c.intra_scantable.permutated;
231  const uint16_t *quant_matrix;
232  const int qscale = s->c.qscale;
233  int mismatch;
234 
235  mismatch = 1;
236 
237  {
238  OPEN_READER(re, &s->gb);
239  i = -1;
240  if (n < 4)
241  quant_matrix = s->c.inter_matrix;
242  else
243  quant_matrix = s->c.chroma_inter_matrix;
244 
245  // Special case for first coefficient, no need to add second VLC table.
246  UPDATE_CACHE(re, &s->gb);
247  if (((int32_t) GET_CACHE(re, &s->gb)) < 0) {
248  level = (3 * qscale * quant_matrix[0]) >> 5;
249  if (GET_CACHE(re, &s->gb) & 0x40000000)
250  level = -level;
251  block[0] = level;
252  mismatch ^= level;
253  i++;
254  SKIP_BITS(re, &s->gb, 2);
255  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
256  goto end;
257  }
258 
259  /* now decode & dequantize the AC coefficients */
260  for (;;) {
261  GET_RL_VLC(level, run, re, &s->gb, ff_mpeg1_rl_vlc,
262  TEX_VLC_BITS, 2, 0);
263 
264  if (level != 0) {
265  i += run;
266  if (i > MAX_INDEX)
267  break;
268  j = scantable[i];
269  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
270  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
271  SHOW_SBITS(re, &s->gb, 1);
272  SKIP_BITS(re, &s->gb, 1);
273  } else {
274  /* escape */
275  run = SHOW_UBITS(re, &s->gb, 6) + 1;
276  LAST_SKIP_BITS(re, &s->gb, 6);
277  UPDATE_CACHE(re, &s->gb);
278  level = SHOW_SBITS(re, &s->gb, 12);
279  SKIP_BITS(re, &s->gb, 12);
280 
281  i += run;
282  if (i > MAX_INDEX)
283  break;
284  j = scantable[i];
285  if (level < 0) {
286  level = ((-level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
287  level = -level;
288  } else {
289  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
290  }
291  }
292 
293  mismatch ^= level;
294  block[j] = level;
295  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
296  break;
297  UPDATE_CACHE(re, &s->gb);
298  }
299 end:
300  LAST_SKIP_BITS(re, &s->gb, 2);
301  CLOSE_READER(re, &s->gb);
302  }
303  block[63] ^= (mismatch & 1);
304 
306 
307  s->c.block_last_index[n] = i;
308  return 0;
309 }
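/* MPEG-2 non-intra blocks use a 12-bit escape level and no oddification;
 * instead, `mismatch` accumulates the XOR (parity) of every reconstructed
 * coefficient and the LSB of block[63] is toggled at the end so that the sum
 * of all coefficients comes out odd -- MPEG-2's mismatch control. */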
310 
311 static int mpeg2_decode_block_intra(Mpeg12SliceContext *const s,
312  int16_t *block, int n)
313 {
314  int level, dc, diff, i, j, run;
315  int component;
316  const RL_VLC_ELEM *rl_vlc;
317  const uint8_t *const scantable = s->c.intra_scantable.permutated;
318  const uint16_t *quant_matrix;
319  const int qscale = s->c.qscale;
320  int mismatch;
321 
322  /* DC coefficient */
323  if (n < 4) {
324  quant_matrix = s->c.intra_matrix;
325  component = 0;
326  } else {
327  quant_matrix = s->c.chroma_intra_matrix;
328  component = (n & 1) + 1;
329  }
330  diff = decode_dc(&s->gb, component);
331  dc = s->last_dc[component];
332  dc += diff;
333  s->last_dc[component] = dc;
334  block[0] = dc * (1 << (3 - s->c.intra_dc_precision));
335  ff_tlog(s->c.avctx, "dc=%d\n", block[0]);
336  mismatch = block[0] ^ 1;
337  i = 0;
338  if (s->c.intra_vlc_format)
339  rl_vlc = ff_mpeg2_rl_vlc;
340  else
341  rl_vlc = ff_mpeg1_rl_vlc;
342 
343  {
344  OPEN_READER(re, &s->gb);
345  /* now decode & dequantize the AC coefficients */
346  for (;;) {
347  UPDATE_CACHE(re, &s->gb);
348  GET_RL_VLC(level, run, re, &s->gb, rl_vlc,
349  TEX_VLC_BITS, 2, 0);
350 
351  if (level == 127) {
352  break;
353  } else if (level != 0) {
354  i += run;
355  if (i > MAX_INDEX)
356  break;
357  j = scantable[i];
358  level = (level * qscale * quant_matrix[j]) >> 4;
359  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
360  SHOW_SBITS(re, &s->gb, 1);
361  LAST_SKIP_BITS(re, &s->gb, 1);
362  } else {
363  /* escape */
364  run = SHOW_UBITS(re, &s->gb, 6) + 1;
365  SKIP_BITS(re, &s->gb, 6);
366  level = SHOW_SBITS(re, &s->gb, 12);
367  LAST_SKIP_BITS(re, &s->gb, 12);
368  i += run;
369  if (i > MAX_INDEX)
370  break;
371  j = scantable[i];
372  if (level < 0) {
373  level = (-level * qscale * quant_matrix[j]) >> 4;
374  level = -level;
375  } else {
376  level = (level * qscale * quant_matrix[j]) >> 4;
377  }
378  }
379 
380  mismatch ^= level;
381  block[j] = level;
382  }
383  CLOSE_READER(re, &s->gb);
384  }
385  block[63] ^= mismatch & 1;
386 
388 
389  return 0;
390 }
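/* Intra blocks: the DC coefficient is coded as a per-component (Y/Cb/Cr)
 * differential; block[0] = dc << (3 - intra_dc_precision), so a precision of
 * 0..3 (8..11 bits) scales the decoded DC by 8/4/2/1. intra_vlc_format selects
 * which of the two MPEG-2 AC coefficient tables is used (rl_vlc above); a
 * decoded level of 127 is the table's end-of-block marker, and the escape path
 * carries a 12-bit signed level. */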
391 
392 static inline int get_dmv(Mpeg12SliceContext *const s)
393 {
394  if (get_bits1(&s->gb))
395  return 1 - (get_bits1(&s->gb) << 1);
396  else
397  return 0;
398 }
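/* Dual-prime differential ("dmvector") coding: a single '0' bit means 0,
 * '10' means +1 and '11' means -1, which is exactly what get_dmv() returns. */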
399 
400 /* motion type (for MPEG-2) */
401 #define MT_FIELD 1
402 #define MT_FRAME 2
403 #define MT_16X8 2
404 #define MT_DMV 3
405 
406 static int mpeg_decode_mb(Mpeg12SliceContext *const s, int *mb_skip_run)
407 {
408  int i, j, k, cbp, val, mb_type, motion_type;
409  const int mb_block_count = 4 + (1 << s->c.chroma_format);
410  int ret;
411 
412  ff_tlog(s->c.avctx, "decode_mb: x=%d y=%d\n", s->c.mb_x, s->c.mb_y);
413 
414  av_assert2(s->c.mb_skipped == 0);
415 
416  if ((*mb_skip_run)-- != 0) {
417  if (s->c.pict_type == AV_PICTURE_TYPE_P) {
418  s->c.mb_skipped = 1;
419  s->c.cur_pic.mb_type[s->c.mb_x + s->c.mb_y * s->c.mb_stride] =
421  } else {
422  int mb_type;
423 
424  if (s->c.mb_x)
425  mb_type = s->c.cur_pic.mb_type[s->c.mb_x + s->c.mb_y * s->c.mb_stride - 1];
426  else
427  // FIXME not sure if this is allowed in MPEG at all
428  mb_type = s->c.cur_pic.mb_type[s->c.mb_width + (s->c.mb_y - 1) * s->c.mb_stride - 1];
429  if (IS_INTRA(mb_type)) {
430  av_log(s->c.avctx, AV_LOG_ERROR, "skip with previntra\n");
431  return AVERROR_INVALIDDATA;
432  }
433  s->c.cur_pic.mb_type[s->c.mb_x + s->c.mb_y * s->c.mb_stride] =
434  mb_type | MB_TYPE_SKIP;
435 
436  if ((s->c.mv[0][0][0] | s->c.mv[0][0][1] | s->c.mv[1][0][0] | s->c.mv[1][0][1]) == 0)
437  s->c.mb_skipped = 1;
438  }
439 
440  return 0;
441  }
442 
443  switch (s->c.pict_type) {
444  default:
445  case AV_PICTURE_TYPE_I:
446  if (get_bits1(&s->gb) == 0) {
447  if (get_bits1(&s->gb) == 0) {
448  av_log(s->c.avctx, AV_LOG_ERROR,
449  "Invalid mb type in I-frame at %d %d\n",
450  s->c.mb_x, s->c.mb_y);
451  return AVERROR_INVALIDDATA;
452  }
453  mb_type = MB_TYPE_QUANT | MB_TYPE_INTRA;
454  } else {
455  mb_type = MB_TYPE_INTRA;
456  }
457  break;
458  case AV_PICTURE_TYPE_P:
459  mb_type = get_vlc2(&s->gb, ff_mb_ptype_vlc, MB_PTYPE_VLC_BITS, 1);
460  if (mb_type < 0) {
461  av_log(s->c.avctx, AV_LOG_ERROR,
462  "Invalid mb type in P-frame at %d %d\n", s->c.mb_x, s->c.mb_y);
463  return AVERROR_INVALIDDATA;
464  }
465  break;
466  case AV_PICTURE_TYPE_B:
467  mb_type = get_vlc2(&s->gb, ff_mb_btype_vlc, MB_BTYPE_VLC_BITS, 1);
468  if (mb_type < 0) {
469  av_log(s->c.avctx, AV_LOG_ERROR,
470  "Invalid mb type in B-frame at %d %d\n", s->c.mb_x, s->c.mb_y);
471  return AVERROR_INVALIDDATA;
472  }
473  break;
474  }
475  ff_tlog(s->c.avctx, "mb_type=%x\n", mb_type);
476 // motion_type = 0; /* avoid warning */
477  if (IS_INTRA(mb_type)) {
478  s->c.bdsp.clear_blocks(s->block[0]);
479 
480  if (!s->c.chroma_y_shift)
481  s->c.bdsp.clear_blocks(s->block[6]);
482 
483  /* compute DCT type */
484  // FIXME: add an interlaced_dct coded var?
485  if (s->c.picture_structure == PICT_FRAME &&
486  !s->c.frame_pred_frame_dct)
487  s->c.interlaced_dct = get_bits1(&s->gb);
488 
489  if (IS_QUANT(mb_type))
490  s->c.qscale = mpeg_get_qscale(&s->gb, s->c.q_scale_type);
491 
492  if (s->c.concealment_motion_vectors) {
493  /* just parse them */
494  if (s->c.picture_structure != PICT_FRAME)
495  skip_bits1(&s->gb); /* field select */
496 
497  s->c.mv[0][0][0] =
498  s->c.last_mv[0][0][0] =
499  s->c.last_mv[0][1][0] = mpeg_decode_motion(s, s->c.mpeg_f_code[0][0],
500  s->c.last_mv[0][0][0]);
501  s->c.mv[0][0][1] =
502  s->c.last_mv[0][0][1] =
503  s->c.last_mv[0][1][1] = mpeg_decode_motion(s, s->c.mpeg_f_code[0][1],
504  s->c.last_mv[0][0][1]);
505 
506  check_marker(s->c.avctx, &s->gb, "after concealment_motion_vectors");
507  } else {
508  /* reset mv prediction */
509  memset(s->c.last_mv, 0, sizeof(s->c.last_mv));
510  }
511  s->c.mb_intra = 1;
512 
513  if (s->c.codec_id == AV_CODEC_ID_MPEG2VIDEO) {
514  for (i = 0; i < mb_block_count; i++)
515  if ((ret = mpeg2_decode_block_intra(s, s->block[i], i)) < 0)
516  return ret;
517  } else {
518  for (i = 0; i < 6; i++) {
519  ret = ff_mpeg1_decode_block_intra(&s->gb,
520  s->c.intra_matrix,
521  s->c.intra_scantable.permutated,
522  s->last_dc, s->block[i],
523  i, s->c.qscale);
524  if (ret < 0) {
525  av_log(s->c.avctx, AV_LOG_ERROR, "ac-tex damaged at %d %d\n",
526  s->c.mb_x, s->c.mb_y);
527  return ret;
528  }
529  }
530  }
531  } else {
532  if (mb_type & MB_TYPE_ZERO_MV) {
533  av_assert2(mb_type & MB_TYPE_CBP);
534 
535  s->c.mv_dir = MV_DIR_FORWARD;
536  if (s->c.picture_structure == PICT_FRAME) {
537  if (s->c.picture_structure == PICT_FRAME
538  && !s->c.frame_pred_frame_dct)
539  s->c.interlaced_dct = get_bits1(&s->gb);
540  s->c.mv_type = MV_TYPE_16X16;
541  } else {
542  s->c.mv_type = MV_TYPE_FIELD;
543  mb_type |= MB_TYPE_INTERLACED;
544  s->c.field_select[0][0] = s->c.picture_structure - 1;
545  }
546 
547  if (IS_QUANT(mb_type))
548  s->c.qscale = mpeg_get_qscale(&s->gb, s->c.q_scale_type);
549 
550  s->c.last_mv[0][0][0] = 0;
551  s->c.last_mv[0][0][1] = 0;
552  s->c.last_mv[0][1][0] = 0;
553  s->c.last_mv[0][1][1] = 0;
554  s->c.mv[0][0][0] = 0;
555  s->c.mv[0][0][1] = 0;
556  } else {
557  av_assert2(mb_type & MB_TYPE_BIDIR_MV);
558  // FIXME decide if MBs in field pictures are MB_TYPE_INTERLACED
559  /* get additional motion vector type */
560  if (s->c.picture_structure == PICT_FRAME && s->c.frame_pred_frame_dct) {
561  motion_type = MT_FRAME;
562  } else {
563  motion_type = get_bits(&s->gb, 2);
564  if (s->c.picture_structure == PICT_FRAME && HAS_CBP(mb_type))
565  s->c.interlaced_dct = get_bits1(&s->gb);
566  }
567 
568  if (IS_QUANT(mb_type))
569  s->c.qscale = mpeg_get_qscale(&s->gb, s->c.q_scale_type);
570 
571  /* motion vectors */
572  s->c.mv_dir = MB_TYPE_MV_2_MV_DIR(mb_type);
573  ff_tlog(s->c.avctx, "motion_type=%d\n", motion_type);
574  switch (motion_type) {
575  case MT_FRAME: /* or MT_16X8 */
576  if (s->c.picture_structure == PICT_FRAME) {
577  mb_type |= MB_TYPE_16x16;
578  s->c.mv_type = MV_TYPE_16X16;
579  for (i = 0; i < 2; i++) {
580  if (HAS_MV(mb_type, i)) {
581  /* MT_FRAME */
582  s->c.mv[i][0][0] =
583  s->c.last_mv[i][0][0] =
584  s->c.last_mv[i][1][0] =
585  mpeg_decode_motion(s, s->c.mpeg_f_code[i][0],
586  s->c.last_mv[i][0][0]);
587  s->c.mv[i][0][1] =
588  s->c.last_mv[i][0][1] =
589  s->c.last_mv[i][1][1] =
590  mpeg_decode_motion(s, s->c.mpeg_f_code[i][1],
591  s->c.last_mv[i][0][1]);
592  /* full_pel: only for MPEG-1 */
593  if (s->c.full_pel[i]) {
594  s->c.mv[i][0][0] *= 2;
595  s->c.mv[i][0][1] *= 2;
596  }
597  }
598  }
599  } else {
600  mb_type |= MB_TYPE_16x8 | MB_TYPE_INTERLACED;
601  s->c.mv_type = MV_TYPE_16X8;
602  for (i = 0; i < 2; i++) {
603  if (HAS_MV(mb_type, i)) {
604  /* MT_16X8 */
605  for (j = 0; j < 2; j++) {
606  s->c.field_select[i][j] = get_bits1(&s->gb);
607  for (k = 0; k < 2; k++) {
608  val = mpeg_decode_motion(s, s->c.mpeg_f_code[i][k],
609  s->c.last_mv[i][j][k]);
610  s->c.last_mv[i][j][k] = val;
611  s->c.mv[i][j][k] = val;
612  }
613  }
614  }
615  }
616  }
617  break;
618  case MT_FIELD:
619  s->c.mv_type = MV_TYPE_FIELD;
620  if (s->c.picture_structure == PICT_FRAME) {
621  mb_type |= MB_TYPE_16x8 | MB_TYPE_INTERLACED;
622  for (i = 0; i < 2; i++) {
623  if (HAS_MV(mb_type, i)) {
624  for (j = 0; j < 2; j++) {
625  s->c.field_select[i][j] = get_bits1(&s->gb);
626  val = mpeg_decode_motion(s, s->c.mpeg_f_code[i][0],
627  s->c.last_mv[i][j][0]);
628  s->c.last_mv[i][j][0] = val;
629  s->c.mv[i][j][0] = val;
630  ff_tlog(s->c.avctx, "fmx=%d\n", val);
631  val = mpeg_decode_motion(s, s->c.mpeg_f_code[i][1],
632  s->c.last_mv[i][j][1] >> 1);
633  s->c.last_mv[i][j][1] = 2 * val;
634  s->c.mv[i][j][1] = val;
635  ff_tlog(s->c.avctx, "fmy=%d\n", val);
636  }
637  }
638  }
639  } else {
640  av_assert0(!s->c.progressive_sequence);
641  mb_type |= MB_TYPE_16x16 | MB_TYPE_INTERLACED;
642  for (i = 0; i < 2; i++) {
643  if (HAS_MV(mb_type, i)) {
644  s->c.field_select[i][0] = get_bits1(&s->gb);
645  for (k = 0; k < 2; k++) {
646  val = mpeg_decode_motion(s, s->c.mpeg_f_code[i][k],
647  s->c.last_mv[i][0][k]);
648  s->c.last_mv[i][0][k] = val;
649  s->c.last_mv[i][1][k] = val;
650  s->c.mv[i][0][k] = val;
651  }
652  }
653  }
654  }
655  break;
656  case MT_DMV:
657  if (s->c.progressive_sequence) {
658  av_log(s->c.avctx, AV_LOG_ERROR, "MT_DMV in progressive_sequence\n");
659  return AVERROR_INVALIDDATA;
660  }
661  s->c.mv_type = MV_TYPE_DMV;
662  for (i = 0; i < 2; i++) {
663  if (HAS_MV(mb_type, i)) {
664  int dmx, dmy, mx, my, m;
665  const int my_shift = s->c.picture_structure == PICT_FRAME;
666 
667  mx = mpeg_decode_motion(s, s->c.mpeg_f_code[i][0],
668  s->c.last_mv[i][0][0]);
669  s->c.last_mv[i][0][0] = mx;
670  s->c.last_mv[i][1][0] = mx;
671  dmx = get_dmv(s);
672  my = mpeg_decode_motion(s, s->c.mpeg_f_code[i][1],
673  s->c.last_mv[i][0][1] >> my_shift);
674  dmy = get_dmv(s);
675 
676 
677  s->c.last_mv[i][0][1] = my * (1 << my_shift);
678  s->c.last_mv[i][1][1] = my * (1 << my_shift);
679 
680  s->c.mv[i][0][0] = mx;
681  s->c.mv[i][0][1] = my;
682  s->c.mv[i][1][0] = mx; // not used
683  s->c.mv[i][1][1] = my; // not used
684 
685  if (s->c.picture_structure == PICT_FRAME) {
686  mb_type |= MB_TYPE_16x16 | MB_TYPE_INTERLACED;
687 
688  // m = 1 + 2 * s->c.top_field_first;
689  m = s->c.top_field_first ? 1 : 3;
690 
691  /* top -> top pred */
692  s->c.mv[i][2][0] = ((mx * m + (mx > 0)) >> 1) + dmx;
693  s->c.mv[i][2][1] = ((my * m + (my > 0)) >> 1) + dmy - 1;
694  m = 4 - m;
695  s->c.mv[i][3][0] = ((mx * m + (mx > 0)) >> 1) + dmx;
696  s->c.mv[i][3][1] = ((my * m + (my > 0)) >> 1) + dmy + 1;
697  } else {
698  mb_type |= MB_TYPE_16x16;
699 
700  s->c.mv[i][2][0] = ((mx + (mx > 0)) >> 1) + dmx;
701  s->c.mv[i][2][1] = ((my + (my > 0)) >> 1) + dmy;
702  if (s->c.picture_structure == PICT_TOP_FIELD)
703  s->c.mv[i][2][1]--;
704  else
705  s->c.mv[i][2][1]++;
706  }
707  }
708  }
709  break;
710  default:
711  av_log(s->c.avctx, AV_LOG_ERROR,
712  "00 motion_type at %d %d\n", s->c.mb_x, s->c.mb_y);
713  return AVERROR_INVALIDDATA;
714  }
715  }
716 
717  s->c.mb_intra = 0;
718  s->last_dc[0] = s->last_dc[1] = s->last_dc[2] = 128 << s->c.intra_dc_precision;
719  if (HAS_CBP(mb_type)) {
720  s->c.bdsp.clear_blocks(s->block[0]);
721 
722  cbp = get_vlc2(&s->gb, ff_mb_pat_vlc, MB_PAT_VLC_BITS, 1);
723  if (mb_block_count > 6) {
724  cbp *= 1 << mb_block_count - 6;
725  cbp |= get_bits(&s->gb, mb_block_count - 6);
726  s->c.bdsp.clear_blocks(s->block[6]);
727  }
728  if (cbp <= 0) {
729  av_log(s->c.avctx, AV_LOG_ERROR,
730  "invalid cbp %d at %d %d\n", cbp, s->c.mb_x, s->c.mb_y);
731  return AVERROR_INVALIDDATA;
732  }
733 
734  if (s->c.codec_id == AV_CODEC_ID_MPEG2VIDEO) {
735  cbp <<= 12 - mb_block_count;
736 
737  for (i = 0; i < mb_block_count; i++) {
738  if (cbp & (1 << 11)) {
739  if ((ret = mpeg2_decode_block_non_intra(s, s->block[i], i)) < 0)
740  return ret;
741  } else {
742  s->c.block_last_index[i] = -1;
743  }
744  cbp += cbp;
745  }
746  } else {
747  for (i = 0; i < 6; i++) {
748  if (cbp & 32) {
749  if ((ret = mpeg1_decode_block_inter(s, s->block[i], i)) < 0)
750  return ret;
751  } else {
752  s->c.block_last_index[i] = -1;
753  }
754  cbp += cbp;
755  }
756  }
757  } else {
758  for (i = 0; i < 12; i++)
759  s->c.block_last_index[i] = -1;
760  }
761  }
762 
763  s->c.cur_pic.mb_type[s->c.mb_x + s->c.mb_y * s->c.mb_stride] = mb_type;
764 
765  return 0;
766 }
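/* Macroblock layer summary: the macroblock_address_increment is read by the
 * slice loop and passed in as mb_skip_run; while it is non-zero this function
 * only marks the MB as skipped. Otherwise it reads the macroblock type VLC for
 * the current picture type, the optional interlaced-DCT flag and quantiser
 * update, the motion vectors according to motion_type (frame, field, 16x8 or
 * dual prime), the coded block pattern, and finally the coefficients of each
 * coded block. */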
767 
769 {
770  Mpeg1Context *s = avctx->priv_data;
771  MPVContext *const s2 = &s->slice.c;
772  int ret;
773 
774  s2->slice_ctx_size = sizeof(s->slice);
775  s2->out_format = FMT_MPEG1;
776 
777  if ( avctx->codec_tag != AV_RL32("VCR2")
778  && avctx->codec_tag != AV_RL32("BW10"))
779  avctx->coded_width = avctx->coded_height = 0; // do not trust dimensions from input
780  ret = ff_mpv_decode_init(s2, avctx);
781  if (ret < 0)
782  return ret;
783 
785 
787  avctx->color_range = AVCOL_RANGE_MPEG;
788  return 0;
789 }
790 
792 #if CONFIG_MPEG1_NVDEC_HWACCEL
794 #endif
795 #if CONFIG_MPEG1_VDPAU_HWACCEL
797 #endif
800 };
801 
803 #if CONFIG_MPEG2_NVDEC_HWACCEL
805 #endif
806 #if CONFIG_MPEG2_VDPAU_HWACCEL
808 #endif
809 #if CONFIG_MPEG2_DXVA2_HWACCEL
811 #endif
812 #if CONFIG_MPEG2_D3D11VA_HWACCEL
815 #endif
816 #if CONFIG_MPEG2_D3D12VA_HWACCEL
818 #endif
819 #if CONFIG_MPEG2_VAAPI_HWACCEL
821 #endif
822 #if CONFIG_MPEG2_VIDEOTOOLBOX_HWACCEL
824 #endif
827 };
828 
829 static const enum AVPixelFormat mpeg12_pixfmt_list_422[] = {
832 };
833 
834 static const enum AVPixelFormat mpeg12_pixfmt_list_444[] = {
837 };
838 
839 static enum AVPixelFormat mpeg_get_pixelformat(AVCodecContext *avctx)
840 {
841  Mpeg1Context *s1 = avctx->priv_data;
842  MPVContext *const s = &s1->slice.c;
843  const enum AVPixelFormat *pix_fmts;
844 
845  if (CONFIG_GRAY && (avctx->flags & AV_CODEC_FLAG_GRAY))
846  return AV_PIX_FMT_GRAY8;
847 
848  if (s->chroma_format < CHROMA_422)
852  else if (s->chroma_format == CHROMA_422)
854  else
856 
857  return ff_get_format(avctx, pix_fmts);
858 }
859 
860 /* Call this function when we know all parameters.
861  * It may be called in different places for MPEG-1 and MPEG-2. */
863 {
864  Mpeg1Context *s1 = avctx->priv_data;
865  MPVContext *const s = &s1->slice.c;
866  int ret;
867 
868  if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
869  // MPEG-1 aspect
870  AVRational aspect_inv = av_d2q(ff_mpeg1_aspect[s1->aspect_ratio_info], 255);
871  avctx->sample_aspect_ratio = (AVRational) { aspect_inv.den, aspect_inv.num };
872  } else { // MPEG-2
873  // MPEG-2 aspect
874  if (s1->aspect_ratio_info > 1) {
875  AVRational dar =
876  av_mul_q(av_div_q(ff_mpeg2_aspect[s1->aspect_ratio_info],
877  (AVRational) { s1->pan_scan.width,
878  s1->pan_scan.height }),
879  (AVRational) { s->width, s->height });
880 
881  /* We ignore the spec here and guess a bit as reality does not
882  * match the spec, see for example res_change_ffmpeg_aspect.ts
883  * and sequence-display-aspect.mpg.
884  * issue1613, 621, 562 */
885  if ((s1->pan_scan.width == 0) || (s1->pan_scan.height == 0) ||
886  (av_cmp_q(dar, (AVRational) { 4, 3 }) &&
887  av_cmp_q(dar, (AVRational) { 16, 9 }))) {
888  s->avctx->sample_aspect_ratio =
889  av_div_q(ff_mpeg2_aspect[s1->aspect_ratio_info],
890  (AVRational) { s->width, s->height });
891  } else {
892  s->avctx->sample_aspect_ratio =
893  av_div_q(ff_mpeg2_aspect[s1->aspect_ratio_info],
894  (AVRational) { s1->pan_scan.width, s1->pan_scan.height });
895 // issue1613 4/3 16/9 -> 16/9
896 // res_change_ffmpeg_aspect.ts 4/3 225/44 ->4/3
897 // widescreen-issue562.mpg 4/3 16/9 -> 16/9
898 // s->avctx->sample_aspect_ratio = av_mul_q(s->avctx->sample_aspect_ratio, (AVRational) {s->width, s->height});
899  ff_dlog(avctx, "aspect A %d/%d\n",
902  ff_dlog(avctx, "aspect B %d/%d\n", s->avctx->sample_aspect_ratio.num,
903  s->avctx->sample_aspect_ratio.den);
904  }
905  } else {
906  s->avctx->sample_aspect_ratio =
907  ff_mpeg2_aspect[s1->aspect_ratio_info];
908  }
909  } // MPEG-2
910 
911  if (av_image_check_sar(s->width, s->height,
912  avctx->sample_aspect_ratio) < 0) {
913  av_log(avctx, AV_LOG_WARNING, "ignoring invalid SAR: %u/%u\n",
914  avctx->sample_aspect_ratio.num,
915  avctx->sample_aspect_ratio.den);
916  avctx->sample_aspect_ratio = (AVRational){ 0, 1 };
917  }
918 
919  if (!s->context_initialized ||
920  avctx->coded_width != s->width ||
921  avctx->coded_height != s->height ||
922  s1->save_chroma_format != s->chroma_format ||
923  (s1->save_progressive_seq != s->progressive_sequence && FFALIGN(s->height, 16) != FFALIGN(s->height, 32)) ||
924  0) {
925  if (s->context_initialized)
926  ff_mpv_common_end(s);
927 
928  ret = ff_set_dimensions(avctx, s->width, s->height);
929  if (ret < 0)
930  return ret;
931 
932  if (avctx->codec_id == AV_CODEC_ID_MPEG2VIDEO && s1->bit_rate &&
933  (s1->bit_rate != 0x3FFFF*400)) {
934  avctx->rc_max_rate = s1->bit_rate;
935  } else if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO && s1->bit_rate &&
936  (s1->bit_rate != 0x3FFFF*400 || s1->vbv_delay != 0xFFFF)) {
937  avctx->bit_rate = s1->bit_rate;
938  }
939  s1->save_progressive_seq = s->progressive_sequence;
940  s1->save_chroma_format = s->chroma_format;
941 
942  /* low_delay may be forced; in that case we will have B-frames
943  * that behave like P-frames. */
944  avctx->has_b_frames = !s->low_delay;
945 
946  if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
947  // MPEG-1 fps
950  } else { // MPEG-2
951  // MPEG-2 fps
952  av_reduce(&s->avctx->framerate.num,
953  &s->avctx->framerate.den,
954  ff_mpeg12_frame_rate_tab[s1->frame_rate_index].num * s1->frame_rate_ext.num,
955  ff_mpeg12_frame_rate_tab[s1->frame_rate_index].den * s1->frame_rate_ext.den,
956  1 << 30);
957 
958  switch (s->chroma_format) {
959  case CHROMA_420: avctx->chroma_sample_location = AVCHROMA_LOC_LEFT; break;
960  case CHROMA_422:
961  case CHROMA_444: avctx->chroma_sample_location = AVCHROMA_LOC_TOPLEFT; break;
962  default: av_assert0(0);
963  }
964  } // MPEG-2
965 
966  avctx->pix_fmt = mpeg_get_pixelformat(avctx);
967 
968  if ((ret = ff_mpv_common_init(s)) < 0)
969  return ret;
970  if (!s->avctx->lowres)
971  for (int i = 0; i < s->slice_context_count; i++)
972  ff_mpv_framesize_disable(&s->thread_context[i]->sc);
973  }
974  return 0;
975 }
976 
977 static int mpeg1_decode_picture(AVCodecContext *avctx, const uint8_t *buf,
978  int buf_size)
979 {
980  Mpeg1Context *s1 = avctx->priv_data;
981  MPVContext *const s = &s1->slice.c;
982  GetBitContext gb0, *const gb = &gb0;
983  int ref, f_code, vbv_delay, ret;
984 
985  ret = init_get_bits8(gb, buf, buf_size);
986  if (ret < 0)
987  return ret;
988 
989  ref = get_bits(gb, 10); /* temporal ref */
990  s->pict_type = get_bits(gb, 3);
991  if (s->pict_type == 0 || s->pict_type > 3)
992  return AVERROR_INVALIDDATA;
993 
994  vbv_delay = get_bits(gb, 16);
995  s1->vbv_delay = vbv_delay;
996  if (s->pict_type == AV_PICTURE_TYPE_P ||
997  s->pict_type == AV_PICTURE_TYPE_B) {
998  s->full_pel[0] = get_bits1(gb);
999  f_code = get_bits(gb, 3);
1000  if (f_code == 0 && (avctx->err_recognition & (AV_EF_BITSTREAM|AV_EF_COMPLIANT)))
1001  return AVERROR_INVALIDDATA;
1002  f_code += !f_code;
1003  s->mpeg_f_code[0][0] = f_code;
1004  s->mpeg_f_code[0][1] = f_code;
1005  }
1006  if (s->pict_type == AV_PICTURE_TYPE_B) {
1007  s->full_pel[1] = get_bits1(gb);
1008  f_code = get_bits(gb, 3);
1009  if (f_code == 0 && (avctx->err_recognition & (AV_EF_BITSTREAM|AV_EF_COMPLIANT)))
1010  return AVERROR_INVALIDDATA;
1011  f_code += !f_code;
1012  s->mpeg_f_code[1][0] = f_code;
1013  s->mpeg_f_code[1][1] = f_code;
1014  }
1015 
1016  if (avctx->debug & FF_DEBUG_PICT_INFO)
1017  av_log(avctx, AV_LOG_DEBUG,
1018  "vbv_delay %d, ref %d type:%d\n", vbv_delay, ref, s->pict_type);
1019 
1020  return 0;
1021 }
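/* MPEG-1 picture header layout, as parsed above: 10-bit temporal_reference,
 * 3-bit picture_coding_type (1 = I, 2 = P, 3 = B), 16-bit vbv_delay, then for
 * P and B pictures a full_pel flag plus a 3-bit f_code per prediction
 * direction. An f_code of 0 is invalid: it is rejected under strict error
 * recognition and silently bumped to 1 otherwise. */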
1022 
1024  GetBitContext *const gb)
1025 {
1026  MPVContext *const s = &s1->slice.c;
1027  int horiz_size_ext, vert_size_ext;
1028  int bit_rate_ext;
1029 
1030  skip_bits(gb, 1); /* profile and level esc*/
1031  s->avctx->profile = get_bits(gb, 3);
1032  s->avctx->level = get_bits(gb, 4);
1033  s->progressive_sequence = get_bits1(gb); /* progressive_sequence */
1034  s->chroma_format = get_bits(gb, 2); /* chroma_format 1=420, 2=422, 3=444 */
1035 
1036  if (!s->chroma_format) {
1037  s->chroma_format = CHROMA_420;
1038  av_log(s->avctx, AV_LOG_WARNING, "Chroma format invalid\n");
1039  }
1040 
1041  horiz_size_ext = get_bits(gb, 2);
1042  vert_size_ext = get_bits(gb, 2);
1043  s->width |= (horiz_size_ext << 12);
1044  s->height |= (vert_size_ext << 12);
1045  bit_rate_ext = get_bits(gb, 12); /* XXX: handle it */
1046  s1->bit_rate += (bit_rate_ext << 18) * 400LL;
1047  check_marker(s->avctx, gb, "after bit rate extension");
1048  s->avctx->rc_buffer_size += get_bits(gb, 8) * 1024 * 16 << 10;
1049 
1050  s->low_delay = get_bits1(gb);
1051  if (s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY)
1052  s->low_delay = 1;
1053 
1054  s1->frame_rate_ext.num = get_bits(gb, 2) + 1;
1055  s1->frame_rate_ext.den = get_bits(gb, 5) + 1;
1056 
1057  ff_dlog(s->avctx, "sequence extension\n");
1058  s->codec_id = s->avctx->codec_id = AV_CODEC_ID_MPEG2VIDEO;
1059 
1060  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1061  av_log(s->avctx, AV_LOG_DEBUG,
1062  "profile: %d, level: %d ps: %d cf:%d vbv buffer: %d, bitrate:%"PRId64"\n",
1063  s->avctx->profile, s->avctx->level, s->progressive_sequence, s->chroma_format,
1064  s->avctx->rc_buffer_size, s1->bit_rate);
1065 }
1066 
1068  GetBitContext *const gb)
1069 {
1070  MPVContext *const s = &s1->slice.c;
1071  int color_description, w, h;
1072 
1073  skip_bits(gb, 3); /* video format */
1074  color_description = get_bits1(gb);
1075  if (color_description) {
1076  s->avctx->color_primaries = get_bits(gb, 8);
1077  s->avctx->color_trc = get_bits(gb, 8);
1078  s->avctx->colorspace = get_bits(gb, 8);
1079  }
1080  w = get_bits(gb, 14);
1081  skip_bits(gb, 1); // marker
1082  h = get_bits(gb, 14);
1083  // remaining 3 bits are zero padding
1084 
1085  s1->pan_scan.width = 16 * w;
1086  s1->pan_scan.height = 16 * h;
1087 
1088  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1089  av_log(s->avctx, AV_LOG_DEBUG, "sde w:%d, h:%d\n", w, h);
1090 }
1091 
1093  GetBitContext *const gb)
1094 {
1095  MPVContext *const s = &s1->slice.c;
1096  int i, nofco;
1097 
1098  nofco = 1;
1099  if (s->progressive_sequence) {
1100  if (s->repeat_first_field) {
1101  nofco++;
1102  if (s->top_field_first)
1103  nofco++;
1104  }
1105  } else {
1106  if (s->picture_structure == PICT_FRAME) {
1107  nofco++;
1108  if (s->repeat_first_field)
1109  nofco++;
1110  }
1111  }
1112  for (i = 0; i < nofco; i++) {
1113  s1->pan_scan.position[i][0] = get_sbits(gb, 16);
1114  skip_bits(gb, 1); // marker
1115  s1->pan_scan.position[i][1] = get_sbits(gb, 16);
1116  skip_bits(gb, 1); // marker
1117  }
1118 
1119  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1120  av_log(s->avctx, AV_LOG_DEBUG,
1121  "pde (%"PRId16",%"PRId16") (%"PRId16",%"PRId16") (%"PRId16",%"PRId16")\n",
1122  s1->pan_scan.position[0][0], s1->pan_scan.position[0][1],
1123  s1->pan_scan.position[1][0], s1->pan_scan.position[1][1],
1124  s1->pan_scan.position[2][0], s1->pan_scan.position[2][1]);
1125 }
1126 
1127 static int load_matrix(MPVContext *const s, GetBitContext *const gb,
1128  uint16_t matrix0[64], uint16_t matrix1[64], int intra)
1129 {
1130  int i;
1131 
1132  for (i = 0; i < 64; i++) {
1133  int j = s->idsp.idct_permutation[ff_zigzag_direct[i]];
1134  int v = get_bits(gb, 8);
1135  if (v == 0) {
1136  av_log(s->avctx, AV_LOG_ERROR, "matrix damaged\n");
1137  return AVERROR_INVALIDDATA;
1138  }
1139  if (intra && i == 0 && v != 8) {
1140  av_log(s->avctx, AV_LOG_DEBUG, "intra matrix specifies invalid DC quantizer %d, ignoring\n", v);
1141  v = 8; // needed by pink.mpg / issue1046
1142  }
1143  matrix0[j] = v;
1144  if (matrix1)
1145  matrix1[j] = v;
1146  }
1147  return 0;
1148 }
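/* Quantization matrices are transmitted as 64 8-bit values in zigzag order;
 * each value is stored at idct_permutation[zigzag_direct[i]], i.e. de-zigzagged
 * and then permuted for the IDCT in use, so the rest of the decoder can address
 * it through the permuted scantable. A value of 0 is invalid, and the intra DC
 * entry is forced back to 8 (a workaround for broken streams such as pink.mpg). */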
1149 
1151  GetBitContext *const gb)
1152 {
1153  ff_dlog(s->avctx, "matrix extension\n");
1154 
1155  if (get_bits1(gb))
1156  load_matrix(s, gb, s->chroma_intra_matrix, s->intra_matrix, 1);
1157  if (get_bits1(gb))
1158  load_matrix(s, gb, s->chroma_inter_matrix, s->inter_matrix, 0);
1159  if (get_bits1(gb))
1160  load_matrix(s, gb, s->chroma_intra_matrix, NULL, 1);
1161  if (get_bits1(gb))
1162  load_matrix(s, gb, s->chroma_inter_matrix, NULL, 0);
1163 }
1164 
1166  GetBitContext *const gb)
1167 {
1168  MPVContext *const s = &s1->slice.c;
1169 
1170  s->full_pel[0] = s->full_pel[1] = 0;
1171  s->mpeg_f_code[0][0] = get_bits(gb, 4);
1172  s->mpeg_f_code[0][1] = get_bits(gb, 4);
1173  s->mpeg_f_code[1][0] = get_bits(gb, 4);
1174  s->mpeg_f_code[1][1] = get_bits(gb, 4);
1175  s->mpeg_f_code[0][0] += !s->mpeg_f_code[0][0];
1176  s->mpeg_f_code[0][1] += !s->mpeg_f_code[0][1];
1177  s->mpeg_f_code[1][0] += !s->mpeg_f_code[1][0];
1178  s->mpeg_f_code[1][1] += !s->mpeg_f_code[1][1];
1179  if (!s->pict_type && s->context_initialized) {
1180  av_log(s->avctx, AV_LOG_ERROR, "Missing picture start code\n");
1181  if (s->avctx->err_recognition & AV_EF_EXPLODE)
1182  return AVERROR_INVALIDDATA;
1183  av_log(s->avctx, AV_LOG_WARNING, "Guessing pict_type from mpeg_f_code\n");
1184  if (s->mpeg_f_code[1][0] == 15 && s->mpeg_f_code[1][1] == 15) {
1185  if (s->mpeg_f_code[0][0] == 15 && s->mpeg_f_code[0][1] == 15)
1186  s->pict_type = AV_PICTURE_TYPE_I;
1187  else
1188  s->pict_type = AV_PICTURE_TYPE_P;
1189  } else
1190  s->pict_type = AV_PICTURE_TYPE_B;
1191  }
1192 
1193  s->intra_dc_precision = get_bits(gb, 2);
1194  s->picture_structure = get_bits(gb, 2);
1195  s->top_field_first = get_bits1(gb);
1196  s->frame_pred_frame_dct = get_bits1(gb);
1197  s->concealment_motion_vectors = get_bits1(gb);
1198  s->q_scale_type = get_bits1(gb);
1199  s->intra_vlc_format = get_bits1(gb);
1200  s->alternate_scan = get_bits1(gb);
1201  s->repeat_first_field = get_bits1(gb);
1202  s->chroma_420_type = get_bits1(gb);
1203  s->progressive_frame = get_bits1(gb);
1204 
1205  // We only initialize intra_scantable.permutated, as this is all we use.
1206  ff_permute_scantable(s->intra_scantable.permutated,
1207  s->alternate_scan ? ff_alternate_vertical_scan : ff_zigzag_direct,
1208  s->idsp.idct_permutation);
1209 
1210  /* composite display not parsed */
1211  ff_dlog(s->avctx, "intra_dc_precision=%d\n", s->intra_dc_precision);
1212  ff_dlog(s->avctx, "picture_structure=%d\n", s->picture_structure);
1213  ff_dlog(s->avctx, "top field first=%d\n", s->top_field_first);
1214  ff_dlog(s->avctx, "repeat first field=%d\n", s->repeat_first_field);
1215  ff_dlog(s->avctx, "conceal=%d\n", s->concealment_motion_vectors);
1216  ff_dlog(s->avctx, "intra_vlc_format=%d\n", s->intra_vlc_format);
1217  ff_dlog(s->avctx, "alternate_scan=%d\n", s->alternate_scan);
1218  ff_dlog(s->avctx, "frame_pred_frame_dct=%d\n", s->frame_pred_frame_dct);
1219  ff_dlog(s->avctx, "progressive_frame=%d\n", s->progressive_frame);
1220 
1221  return 0;
1222 }
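/* A stored f_code of 15 ('1111') is the "not used" marker, which is what the
 * pict_type guessing above relies on: if both backward f_codes are 15 the
 * picture cannot be a B picture, and if the forward ones are 15 as well it
 * must be an I picture. */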
1223 
1224 static int mpeg_field_start(Mpeg1Context *s1, const uint8_t *buf, int buf_size)
1225 {
1226  MPVContext *const s = &s1->slice.c;
1227  AVCodecContext *avctx = s->avctx;
1228  int second_field = 0;
1229  int ret;
1230 
1231  if (!(avctx->flags2 & AV_CODEC_FLAG2_CHUNKS)) {
1232  if (s->mb_width * s->mb_height * 11LL / (33 * 2 * 8) > buf_size)
1233  return AVERROR_INVALIDDATA;
1234  }
1235 
1236  /* start frame decoding */
1237  if (s->first_field || s->picture_structure == PICT_FRAME) {
1238  AVFrameSideData *pan_scan;
1239 
1240  if ((ret = ff_mpv_frame_start(s, avctx)) < 0)
1241  return ret;
1242 
1243  if (s->picture_structure != PICT_FRAME) {
1244  s->cur_pic.ptr->f->flags |= AV_FRAME_FLAG_TOP_FIELD_FIRST *
1245  (s->picture_structure == PICT_TOP_FIELD);
1246 
1247  for (int i = 0; i < 3; i++) {
1248  if (s->picture_structure == PICT_BOTTOM_FIELD) {
1249  s->cur_pic.data[i] = FF_PTR_ADD(s->cur_pic.data[i],
1250  s->cur_pic.linesize[i]);
1251  }
1252  s->cur_pic.linesize[i] *= 2;
1253  }
1254  }
1255 
1257 
1258  /* first check if we must repeat the frame */
1259  s->cur_pic.ptr->f->repeat_pict = 0;
1260  if (s->repeat_first_field) {
1261  if (s->progressive_sequence) {
1262  if (s->top_field_first)
1263  s->cur_pic.ptr->f->repeat_pict = 4;
1264  else
1265  s->cur_pic.ptr->f->repeat_pict = 2;
1266  } else if (s->progressive_frame) {
1267  s->cur_pic.ptr->f->repeat_pict = 1;
1268  }
1269  }
1270 
1271  ret = ff_frame_new_side_data(s->avctx, s->cur_pic.ptr->f,
1272  AV_FRAME_DATA_PANSCAN, sizeof(s1->pan_scan),
1273  &pan_scan);
1274  if (ret < 0)
1275  return ret;
1276  if (pan_scan)
1277  memcpy(pan_scan->data, &s1->pan_scan, sizeof(s1->pan_scan));
1278 
1279  if (s1->a53_buf_ref) {
1280  ret = ff_frame_new_side_data_from_buf(
1281  s->avctx, s->cur_pic.ptr->f, AV_FRAME_DATA_A53_CC,
1282  &s1->a53_buf_ref);
1283  if (ret < 0)
1284  return ret;
1285  }
1286 
1287  if (s1->has_stereo3d) {
1288  AVStereo3D *stereo = av_stereo3d_create_side_data(s->cur_pic.ptr->f);
1289  if (!stereo)
1290  return AVERROR(ENOMEM);
1291 
1292  stereo->type = s1->stereo3d_type;
1293  s1->has_stereo3d = 0;
1294  }
1295 
1296  if (s1->has_afd) {
1297  AVFrameSideData *sd;
1298  ret = ff_frame_new_side_data(s->avctx, s->cur_pic.ptr->f,
1299  AV_FRAME_DATA_AFD, 1, &sd);
1300  if (ret < 0)
1301  return ret;
1302  if (sd)
1303  *sd->data = s1->afd;
1304  s1->has_afd = 0;
1305  }
1306  } else { // second field
1307  second_field = 1;
1308  if (!s->cur_pic.ptr) {
1309  av_log(s->avctx, AV_LOG_ERROR, "first field missing\n");
1310  return AVERROR_INVALIDDATA;
1311  }
1312 
1313  if (s->avctx->hwaccel) {
1314  if ((ret = FF_HW_SIMPLE_CALL(s->avctx, end_frame)) < 0) {
1315  av_log(avctx, AV_LOG_ERROR,
1316  "hardware accelerator failed to decode first field\n");
1317  return ret;
1318  }
1319  }
1321  if (ret < 0)
1322  return ret;
1323 
1324  for (int i = 0; i < 3; i++) {
1325  s->cur_pic.data[i] = s->cur_pic.ptr->f->data[i];
1326  if (s->picture_structure == PICT_BOTTOM_FIELD)
1327  s->cur_pic.data[i] +=
1328  s->cur_pic.ptr->f->linesize[i];
1329  }
1330  }
1331 
1332  if (avctx->hwaccel) {
1333  if ((ret = FF_HW_CALL(avctx, start_frame, NULL, buf, buf_size)) < 0)
1334  return ret;
1335  } else if (s->codec_tag == MKTAG('V', 'C', 'R', '2')) {
1336  // Exchange UV
1337  FFSWAP(uint8_t*, s->cur_pic.data[1], s->cur_pic.data[2]);
1338  FFSWAP(ptrdiff_t, s->cur_pic.linesize[1], s->cur_pic.linesize[2]);
1339  if (!second_field) {
1340  FFSWAP(uint8_t*, s->next_pic.data[1], s->next_pic.data[2]);
1341  FFSWAP(ptrdiff_t, s->next_pic.linesize[1], s->next_pic.linesize[2]);
1342  FFSWAP(uint8_t*, s->last_pic.data[1], s->last_pic.data[2]);
1343  FFSWAP(ptrdiff_t, s->last_pic.linesize[1], s->last_pic.linesize[2]);
1344  }
1345  }
1346 
1347  return 0;
1348 }
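/* repeat_first_field is mapped onto AVFrame.repeat_pict as a number of extra
 * fields to display: 1 for an interlaced sequence with progressive_frame set
 * (3:2 pulldown), 2 for frame doubling and 4 for frame tripling in a
 * progressive sequence with top_field_first set. */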
1349 
1350 #define DECODE_SLICE_ERROR -1
1351 #define DECODE_SLICE_OK 0
1352 
1353 /**
1354  * Decode a slice.
1355  * Mpeg12SliceContext.c.mb_y must be set to the MB row from the startcode.
1356  * @return DECODE_SLICE_ERROR if the slice is damaged,
1357  * DECODE_SLICE_OK if this slice is OK
1358  */
1359 static int mpeg_decode_slice(Mpeg12SliceContext *const s, int mb_y,
1360  const uint8_t **buf, int buf_size)
1361 {
1362  AVCodecContext *avctx = s->c.avctx;
1363  const int lowres = s->c.avctx->lowres;
1364  const int field_pic = s->c.picture_structure != PICT_FRAME;
1365  int ret;
1366 
1367  s->c.resync_mb_x =
1368  s->c.resync_mb_y = -1;
1369 
1370  av_assert0(mb_y < s->c.mb_height);
1371 
1372  ret = init_get_bits8(&s->gb, *buf, buf_size);
1373  if (ret < 0)
1374  return ret;
1375 
1376  if (s->c.codec_id != AV_CODEC_ID_MPEG1VIDEO && s->c.mb_height > 2800/16)
1377  skip_bits(&s->gb, 3);
1378 
1379  s->c.interlaced_dct = 0;
1380 
1381  s->c.qscale = mpeg_get_qscale(&s->gb, s->c.q_scale_type);
1382 
1383  if (s->c.qscale == 0) {
1384  av_log(s->c.avctx, AV_LOG_ERROR, "qscale == 0\n");
1385  return AVERROR_INVALIDDATA;
1386  }
1387 
1388  /* extra slice info */
1389  if (skip_1stop_8data_bits(&s->gb) < 0)
1390  return AVERROR_INVALIDDATA;
1391 
1392  s->c.mb_x = 0;
1393 
1394  if (mb_y == 0 && s->c.codec_tag == AV_RL32("SLIF")) {
1395  skip_bits1(&s->gb);
1396  } else {
1397  while (get_bits_left(&s->gb) > 0) {
1398  int code = get_vlc2(&s->gb, ff_mbincr_vlc,
1399  MBINCR_VLC_BITS, 2);
1400  if (code < 0) {
1401  av_log(s->c.avctx, AV_LOG_ERROR, "first mb_incr damaged\n");
1402  return AVERROR_INVALIDDATA;
1403  }
1404  if (code >= 33) {
1405  if (code == 33)
1406  s->c.mb_x += 33;
1407  /* otherwise, stuffing, nothing to do */
1408  } else {
1409  s->c.mb_x += code;
1410  break;
1411  }
1412  }
1413  }
1414 
1415  if (s->c.mb_x >= (unsigned) s->c.mb_width) {
1416  av_log(s->c.avctx, AV_LOG_ERROR, "initial skip overflow\n");
1417  return AVERROR_INVALIDDATA;
1418  }
1419 
1420  if (avctx->hwaccel) {
1421  const uint8_t *buf_end, *buf_start = *buf - 4; /* include start_code */
1422  int start_code = -1;
1423  buf_end = avpriv_find_start_code(buf_start + 2, *buf + buf_size, &start_code);
1424  if (buf_end < *buf + buf_size)
1425  buf_end -= 4;
1426  s->c.mb_y = mb_y;
1427  if (FF_HW_CALL(avctx, decode_slice, buf_start, buf_end - buf_start) < 0)
1428  return DECODE_SLICE_ERROR;
1429  *buf = buf_end;
1430  return DECODE_SLICE_OK;
1431  }
1432 
1433  s->c.resync_mb_x = s->c.mb_x;
1434  s->c.resync_mb_y = s->c.mb_y = mb_y;
1435  ff_init_block_index(&s->c);
1436 
1437  if (s->c.mb_y == 0 && s->c.mb_x == 0 && (s->c.first_field || s->c.picture_structure == PICT_FRAME)) {
1438  if (s->c.avctx->debug & FF_DEBUG_PICT_INFO) {
1439  av_log(s->c.avctx, AV_LOG_DEBUG,
1440  "qp:%d fc:%2d%2d%2d%2d %c %s %s %s %s dc:%d pstruct:%d fdct:%d cmv:%d qtype:%d ivlc:%d rff:%d %s\n",
1441  s->c.qscale,
1442  s->c.mpeg_f_code[0][0], s->c.mpeg_f_code[0][1],
1443  s->c.mpeg_f_code[1][0], s->c.mpeg_f_code[1][1],
1444  s->c.pict_type == AV_PICTURE_TYPE_I ? 'I' :
1445  (s->c.pict_type == AV_PICTURE_TYPE_P ? 'P' :
1446  (s->c.pict_type == AV_PICTURE_TYPE_B ? 'B' : 'S')),
1447  s->c.progressive_sequence ? "ps" : "",
1448  s->c.progressive_frame ? "pf" : "",
1449  s->c.alternate_scan ? "alt" : "",
1450  s->c.top_field_first ? "top" : "",
1451  s->c.intra_dc_precision, s->c.picture_structure,
1452  s->c.frame_pred_frame_dct, s->c.concealment_motion_vectors,
1453  s->c.q_scale_type, s->c.intra_vlc_format,
1454  s->c.repeat_first_field, s->c.chroma_420_type ? "420" : "");
1455  }
1456  }
1457 
1458  s->last_dc[0] = 128 << s->c.intra_dc_precision;
1459  s->last_dc[1] = s->last_dc[0];
1460  s->last_dc[2] = s->last_dc[0];
1461  memset(s->c.last_mv, 0, sizeof(s->c.last_mv));
1462 
1463  for (int mb_skip_run = 0;;) {
1464  ret = mpeg_decode_mb(s, &mb_skip_run);
1465  if (ret < 0)
1466  return ret;
1467 
1468  // Note motion_val is normally NULL unless we want to extract the MVs.
1469  if (s->c.cur_pic.motion_val[0]) {
1470  const int wrap = s->c.b8_stride;
1471  int xy = s->c.mb_x * 2 + s->c.mb_y * 2 * wrap;
1472  int b8_xy = 4 * (s->c.mb_x + s->c.mb_y * s->c.mb_stride);
1473  int motion_x, motion_y, dir, i;
1474 
1475  for (i = 0; i < 2; i++) {
1476  for (dir = 0; dir < 2; dir++) {
1477  if (s->c.mb_intra ||
1478  (dir == 1 && s->c.pict_type != AV_PICTURE_TYPE_B)) {
1479  motion_x = motion_y = 0;
1480  } else if (s->c.mv_type == MV_TYPE_16X16 ||
1481  (s->c.mv_type == MV_TYPE_FIELD && field_pic)) {
1482  motion_x = s->c.mv[dir][0][0];
1483  motion_y = s->c.mv[dir][0][1];
1484  } else { /* if ((s->c.mv_type == MV_TYPE_FIELD) || (s->c.mv_type == MV_TYPE_16X8)) */
1485  motion_x = s->c.mv[dir][i][0];
1486  motion_y = s->c.mv[dir][i][1];
1487  }
1488 
1489  s->c.cur_pic.motion_val[dir][xy][0] = motion_x;
1490  s->c.cur_pic.motion_val[dir][xy][1] = motion_y;
1491  s->c.cur_pic.motion_val[dir][xy + 1][0] = motion_x;
1492  s->c.cur_pic.motion_val[dir][xy + 1][1] = motion_y;
1493  s->c.cur_pic.ref_index [dir][b8_xy] =
1494  s->c.cur_pic.ref_index [dir][b8_xy + 1] = s->c.field_select[dir][i];
1495  av_assert2(s->c.field_select[dir][i] == 0 ||
1496  s->c.field_select[dir][i] == 1);
1497  }
1498  xy += wrap;
1499  b8_xy += 2;
1500  }
1501  }
1502 
1503  s->c.dest[0] += 16 >> lowres;
1504  s->c.dest[1] +=(16 >> lowres) >> s->c.chroma_x_shift;
1505  s->c.dest[2] +=(16 >> lowres) >> s->c.chroma_x_shift;
1506 
1507  ff_mpv_reconstruct_mb(&s->c, s->block);
1508 
1509  if (++s->c.mb_x >= s->c.mb_width) {
1510  const int mb_size = 16 >> s->c.avctx->lowres;
1511  int left;
1512 
1513  ff_mpeg_draw_horiz_band(&s->c, mb_size * (s->c.mb_y >> field_pic), mb_size);
1514 
1515  s->c.mb_x = 0;
1516  s->c.mb_y += 1 << field_pic;
1517 
1518  if (s->c.mb_y >= s->c.mb_height) {
1519  int left = get_bits_left(&s->gb);
1520  int is_d10 = s->c.chroma_format == CHROMA_422 &&
1521  s->c.pict_type == AV_PICTURE_TYPE_I &&
1522  avctx->profile == 0 && avctx->level == 5 &&
1523  s->c.intra_dc_precision == 2 &&
1524  s->c.q_scale_type == 1 && s->c.alternate_scan == 0 &&
1525  s->c.progressive_frame == 0
1526  /* vbv_delay == 0xBBB || 0xE10 */;
1527 
1528  if (left >= 32 && !is_d10) {
1529  GetBitContext gb = s->gb;
1530  align_get_bits(&gb);
1531  if (show_bits(&gb, 24) == 0x060E2B) {
1532  av_log(avctx, AV_LOG_DEBUG, "Invalid MXF data found in video stream\n");
1533  is_d10 = 1;
1534  }
1535  if (left > 32 && show_bits_long(&gb, 32) == 0x201) {
1536  av_log(avctx, AV_LOG_DEBUG, "skipping m704 alpha (unsupported)\n");
1537  goto eos;
1538  }
1539  }
1540 
1541  if (left < 0 ||
1542  (left && show_bits(&s->gb, FFMIN(left, 23)) && !is_d10) ||
1543  ((avctx->err_recognition & (AV_EF_BITSTREAM | AV_EF_AGGRESSIVE)) && left > 8)) {
1544  av_log(avctx, AV_LOG_ERROR, "end mismatch left=%d %0X at %d %d\n",
1545  left, left>0 ? show_bits(&s->gb, FFMIN(left, 23)) : 0, s->c.mb_x, s->c.mb_y);
1546  return AVERROR_INVALIDDATA;
1547  } else
1548  goto eos;
1549  }
1550  // There are some files out there which are missing the last slice.
1551  // In cases where that slice is completely outside the visible area,
1552  // we detect this here instead of running into the end expecting
1553  // more data.
1554  left = get_bits_left(&s->gb);
1555  if (s->c.mb_y >= ((s->c.height + 15) >> 4) &&
1556  !s->c.progressive_sequence &&
1557  left <= 25 &&
1558  left >= 0 &&
1559  mb_skip_run == -1 &&
1560  (!left || show_bits(&s->gb, left) == 0))
1561  goto eos;
1562 
1563  ff_init_block_index(&s->c);
1564  }
1565 
1566  /* skip mb handling */
1567  if (mb_skip_run == -1) {
1568  /* read increment again */
1569  mb_skip_run = 0;
1570  for (;;) {
1571  int code = get_vlc2(&s->gb, ff_mbincr_vlc,
1572  MBINCR_VLC_BITS, 2);
1573  if (code < 0) {
1574  av_log(s->c.avctx, AV_LOG_ERROR, "mb incr damaged\n");
1575  return AVERROR_INVALIDDATA;
1576  }
1577  if (code >= 33) {
1578  if (code == 33) {
1579  mb_skip_run += 33;
1580  } else if (code == 35) {
1581  if (mb_skip_run != 0 || show_bits(&s->gb, 15) != 0) {
1582  av_log(s->c.avctx, AV_LOG_ERROR, "slice mismatch\n");
1583  return AVERROR_INVALIDDATA;
1584  }
1585  goto eos; /* end of slice */
1586  }
1587  /* otherwise, stuffing, nothing to do */
1588  } else {
1589  mb_skip_run += code;
1590  break;
1591  }
1592  }
1593  if (mb_skip_run) {
1594  int i;
1595  if (s->c.pict_type == AV_PICTURE_TYPE_I) {
1596  av_log(s->c.avctx, AV_LOG_ERROR,
1597  "skipped MB in I-frame at %d %d\n", s->c.mb_x, s->c.mb_y);
1598  return AVERROR_INVALIDDATA;
1599  }
1600 
1601  /* skip mb */
1602  s->c.mb_intra = 0;
1603  for (i = 0; i < 12; i++)
1604  s->c.block_last_index[i] = -1;
1605  s->last_dc[0] = s->last_dc[1] = s->last_dc[2] = 128 << s->c.intra_dc_precision;
1606  if (s->c.picture_structure == PICT_FRAME)
1607  s->c.mv_type = MV_TYPE_16X16;
1608  else
1609  s->c.mv_type = MV_TYPE_FIELD;
1610  if (s->c.pict_type == AV_PICTURE_TYPE_P) {
1611  /* if P type, zero motion vector is implied */
1612  s->c.mv_dir = MV_DIR_FORWARD;
1613  s->c.mv[0][0][0] = s->c.mv[0][0][1] = 0;
1614  s->c.last_mv[0][0][0] = s->c.last_mv[0][0][1] = 0;
1615  s->c.last_mv[0][1][0] = s->c.last_mv[0][1][1] = 0;
1616  s->c.field_select[0][0] = (s->c.picture_structure - 1) & 1;
1617  } else {
1618  /* if B type, reuse previous vectors and directions */
1619  s->c.mv[0][0][0] = s->c.last_mv[0][0][0];
1620  s->c.mv[0][0][1] = s->c.last_mv[0][0][1];
1621  s->c.mv[1][0][0] = s->c.last_mv[1][0][0];
1622  s->c.mv[1][0][1] = s->c.last_mv[1][0][1];
1623  s->c.field_select[0][0] = (s->c.picture_structure - 1) & 1;
1624  s->c.field_select[1][0] = (s->c.picture_structure - 1) & 1;
1625  }
1626  }
1627  }
1628  }
1629 eos: // end of slice
1630  if (get_bits_left(&s->gb) < 0) {
1631  av_log(s->c.avctx, AV_LOG_ERROR, "overread %d\n", -get_bits_left(&s->gb));
1632  return AVERROR_INVALIDDATA;
1633  }
1634  *buf += (get_bits_count(&s->gb) - 1) / 8;
1635  ff_dlog(s->c.avctx, "Slice start:%d %d end:%d %d\n", s->c.resync_mb_x, s->c.resync_mb_y, s->c.mb_x, s->c.mb_y);
1636  return 0;
1637 }
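/* The end-of-picture handling above is deliberately lenient: D-10/IMX-style
 * streams (4:2:2 intra-only, flagged profile 0 / level 5, intra_dc_precision 2)
 * may legitimately carry padding after the last slice, a 0x060E2B prefix
 * indicates trailing MXF KLV data, and a 0x201 start code marks an unsupported
 * alpha stream, so only genuinely inconsistent trailing bits are treated as an
 * error. */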
1638 
1640 {
1641  Mpeg12SliceContext *const s = *(void **) arg;
1642  const uint8_t *buf = s->gb.buffer;
1643  const uint8_t *end = buf + get_bits_bytesize(&s->gb, 0);
1644  int mb_y = s->c.start_mb_y;
1645  const int field_pic = s->c.picture_structure != PICT_FRAME;
1646 
1647  s->c.er.error_count = (3 * (s->c.end_mb_y - s->c.start_mb_y) * s->c.mb_width) >> field_pic;
1648 
1649  for (;;) {
1650  uint32_t start_code;
1651  int ret;
1652 
1653  ret = mpeg_decode_slice(s, mb_y, &buf, end - buf);
1654  emms_c();
1655  ff_dlog(c, "ret:%d resync:%d/%d mb:%d/%d ts:%d/%d ec:%d\n",
1656  ret, s->c.resync_mb_x, s->c.resync_mb_y, s->c.mb_x, s->c.mb_y,
1657  s->c.start_mb_y, s->c.end_mb_y, s->c.er.error_count);
1658  if (ret < 0) {
1659  if (c->err_recognition & AV_EF_EXPLODE)
1660  return ret;
1661  if (s->c.resync_mb_x >= 0 && s->c.resync_mb_y >= 0)
1662  ff_er_add_slice(&s->c.er, s->c.resync_mb_x, s->c.resync_mb_y,
1663  s->c.mb_x, s->c.mb_y,
1664  ER_AC_ERROR | ER_DC_ERROR | ER_MV_ERROR);
1665  } else {
1666  ff_er_add_slice(&s->c.er, s->c.resync_mb_x, s->c.resync_mb_y,
1667  s->c.mb_x - 1, s->c.mb_y,
1668  ER_AC_END | ER_DC_END | ER_MV_END);
1669  }
1670 
1671  if (s->c.mb_y == s->c.end_mb_y)
1672  return 0;
1673 
1674  start_code = -1;
1675  buf = avpriv_find_start_code(buf, end, &start_code);
1676  if (start_code < SLICE_MIN_START_CODE || start_code > SLICE_MAX_START_CODE)
1677  return AVERROR_INVALIDDATA;
1678  mb_y = start_code - SLICE_MIN_START_CODE;
1679  if (s->c.codec_id != AV_CODEC_ID_MPEG1VIDEO && s->c.mb_height > 2800/16)
1680  mb_y += (*buf&0xE0)<<2;
1681  mb_y <<= field_pic;
1682  if (s->c.picture_structure == PICT_BOTTOM_FIELD)
1683  mb_y++;
1684  if (mb_y >= s->c.end_mb_y)
1685  return AVERROR_INVALIDDATA;
1686  }
1687 }
1688 
1689 /**
1690  * Handle slice ends.
1691  * @return 1 if it seems to be the last slice
1692  */
1693 static int slice_end(AVCodecContext *avctx, AVFrame *pict, int *got_output)
1694 {
1695  Mpeg1Context *s1 = avctx->priv_data;
1696  MPVContext *const s = &s1->slice.c;
1697 
1698  if (!s->context_initialized || !s->cur_pic.ptr)
1699  return 0;
1700 
1701  if (s->avctx->hwaccel) {
1702  int ret = FF_HW_SIMPLE_CALL(s->avctx, end_frame);
1703  if (ret < 0) {
1704  av_log(avctx, AV_LOG_ERROR,
1705  "hardware accelerator failed to decode picture\n");
1706  return ret;
1707  }
1708  }
1709 
1710  /* end of slice reached */
1711  if (/* s->mb_y << field_pic == s->mb_height && */ !s->first_field && !s1->first_slice) {
1712  /* end of image */
1713 
1714  ff_er_frame_end(&s->er, NULL);
1715 
1717 
1718  if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
1719  int ret = av_frame_ref(pict, s->cur_pic.ptr->f);
1720  if (ret < 0)
1721  return ret;
1722  ff_print_debug_info(s, s->cur_pic.ptr, pict);
1723  ff_mpv_export_qp_table(s, pict, s->cur_pic.ptr, FF_MPV_QSCALE_TYPE_MPEG2);
1724  *got_output = 1;
1725  } else {
1726  /* latency of 1 frame for I- and P-frames */
1727  if (s->last_pic.ptr && !s->last_pic.ptr->dummy) {
1728  int ret = av_frame_ref(pict, s->last_pic.ptr->f);
1729  if (ret < 0)
1730  return ret;
1731  ff_print_debug_info(s, s->last_pic.ptr, pict);
1732  ff_mpv_export_qp_table(s, pict, s->last_pic.ptr, FF_MPV_QSCALE_TYPE_MPEG2);
1733  *got_output = 1;
1734  }
1735  }
1736 
1737  return 1;
1738  } else {
1739  return 0;
1740  }
1741 }
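/* Output ordering: B pictures (and everything in low_delay mode) are returned
 * as soon as they are decoded, while for I and P pictures the previously
 * decoded reference picture is returned instead, giving one frame of latency. */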
1742 
1744  const uint8_t *buf, int buf_size)
1745 {
1746  Mpeg1Context *s1 = avctx->priv_data;
1747  MPVContext *const s = &s1->slice.c;
1748  GetBitContext gb0, *const gb = &gb0;
1749  int width, height;
1750  int i, v, j;
1751 
1752  int ret = init_get_bits8(gb, buf, buf_size);
1753  if (ret < 0)
1754  return ret;
1755 
1756  width = get_bits(gb, 12);
1757  height = get_bits(gb, 12);
1758  if (width == 0 || height == 0) {
1759  av_log(avctx, AV_LOG_WARNING,
1760  "Invalid horizontal or vertical size value.\n");
1762  return AVERROR_INVALIDDATA;
1763  }
1764  s1->aspect_ratio_info = get_bits(gb, 4);
1765  if (s1->aspect_ratio_info == 0) {
1766  av_log(avctx, AV_LOG_ERROR, "aspect ratio has forbidden 0 value\n");
1768  return AVERROR_INVALIDDATA;
1769  }
1770  s1->frame_rate_index = get_bits(gb, 4);
1771  if (s1->frame_rate_index == 0 || s1->frame_rate_index > 13) {
1772  av_log(avctx, AV_LOG_WARNING,
1773  "frame_rate_index %d is invalid\n", s1->frame_rate_index);
1774  s1->frame_rate_index = 1;
1775  }
1776  s1->bit_rate = get_bits(gb, 18) * 400;
1777  if (check_marker(s->avctx, gb, "in sequence header") == 0) {
1778  return AVERROR_INVALIDDATA;
1779  }
1780 
1781  s->avctx->rc_buffer_size = get_bits(gb, 10) * 1024 * 16;
1782  skip_bits(gb, 1);
1783 
1784  /* get matrix */
1785  if (get_bits1(gb)) {
1786  load_matrix(s, gb, s->chroma_intra_matrix, s->intra_matrix, 1);
1787  } else {
1788  for (i = 0; i < 64; i++) {
1789  j = s->idsp.idct_permutation[i];
1790  v = ff_mpeg1_default_intra_matrix[i];
1791  s->intra_matrix[j] = v;
1792  s->chroma_intra_matrix[j] = v;
1793  }
1794  }
1795  if (get_bits1(gb)) {
1796  load_matrix(s, gb, s->chroma_inter_matrix, s->inter_matrix, 0);
1797  } else {
1798  for (i = 0; i < 64; i++) {
1799  int j = s->idsp.idct_permutation[i];
1800  v = ff_mpeg1_default_non_intra_matrix[i];
1801  s->inter_matrix[j] = v;
1802  s->chroma_inter_matrix[j] = v;
1803  }
1804  }
1805 
1806  if (show_bits(gb, 23) != 0) {
1807  av_log(s->avctx, AV_LOG_ERROR, "sequence header damaged\n");
1808  return AVERROR_INVALIDDATA;
1809  }
1810 
1811  s->width = width;
1812  s->height = height;
1813 
1814  /* We set MPEG-2 parameters so that it emulates MPEG-1. */
1815  s->progressive_sequence = 1;
1816  s->progressive_frame = 1;
1817  s->picture_structure = PICT_FRAME;
1818  s->first_field = 0;
1819  s->frame_pred_frame_dct = 1;
1820  s->chroma_format = CHROMA_420;
1821  s->codec_id =
1822  s->avctx->codec_id = AV_CODEC_ID_MPEG1VIDEO;
1823  if (s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY)
1824  s->low_delay = 1;
1825 
1826  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1827  av_log(s->avctx, AV_LOG_DEBUG, "vbv buffer: %d, bitrate:%"PRId64", aspect_ratio_info: %d \n",
1828  s->avctx->rc_buffer_size, s1->bit_rate, s1->aspect_ratio_info);
1829 
1830  return 0;
1831 }
1832 
1834 {
1835  Mpeg1Context *s1 = avctx->priv_data;
1836  MPVContext *const s = &s1->slice.c;
1837  int i, v, ret;
1838 
1839  /* start new MPEG-1 context decoding */
1840  if (s->context_initialized)
1841  ff_mpv_common_end(s);
1842 
1843  s->width = avctx->coded_width;
1844  s->height = avctx->coded_height;
1845  avctx->has_b_frames = 0; // true?
1846  s->low_delay = 1;
1847 
1848  avctx->pix_fmt = mpeg_get_pixelformat(avctx);
1849 
1850  if ((ret = ff_mpv_common_init(s)) < 0)
1851  return ret;
1852  if (!s->avctx->lowres)
1853  for (int i = 0; i < s->slice_context_count; i++)
1854  ff_mpv_framesize_disable(&s->thread_context[i]->sc);
1855 
1856  for (i = 0; i < 64; i++) {
1857  int j = s->idsp.idct_permutation[i];
1858  v = ff_mpeg1_default_intra_matrix[i];
1859  s->intra_matrix[j] = v;
1860  s->chroma_intra_matrix[j] = v;
1861 
1862  v = ff_mpeg1_default_non_intra_matrix[i];
1863  s->inter_matrix[j] = v;
1864  s->chroma_inter_matrix[j] = v;
1865  }
1866 
1867  s->progressive_sequence = 1;
1868  s->progressive_frame = 1;
1869  s->picture_structure = PICT_FRAME;
1870  s->first_field = 0;
1871  s->frame_pred_frame_dct = 1;
1872  s->chroma_format = CHROMA_420;
1873  if (s->codec_tag == AV_RL32("BW10")) {
1874  s->codec_id = s->avctx->codec_id = AV_CODEC_ID_MPEG1VIDEO;
1875  } else {
1876  s->codec_id = s->avctx->codec_id = AV_CODEC_ID_MPEG2VIDEO;
1877  }
1878  s1->save_progressive_seq = s->progressive_sequence;
1879  s1->save_chroma_format = s->chroma_format;
1880  return 0;
1881 }
1882 
1884  const char *label)
1885 {
1886  Mpeg1Context *s1 = avctx->priv_data;
1887 
1889 
1890  if (!s1->cc_format) {
1891  s1->cc_format = format;
1892 
1893  av_log(avctx, AV_LOG_DEBUG, "CC: first seen substream is %s format\n", label);
1894  }
1895 
1896 #if FF_API_CODEC_PROPS
1900 #endif
1901 }
1902 
1904  const uint8_t *p, int buf_size)
1905 {
1906  Mpeg1Context *s1 = avctx->priv_data;
1907 
1908  if ((!s1->cc_format || s1->cc_format == CC_FORMAT_A53_PART4) &&
1909  buf_size >= 6 &&
1910  p[0] == 'G' && p[1] == 'A' && p[2] == '9' && p[3] == '4' &&
1911  p[4] == 3 && (p[5] & 0x40)) {
1912  /* extract A53 Part 4 CC data */
1913  int cc_count = p[5] & 0x1f;
1914  if (cc_count > 0 && buf_size >= 7 + cc_count * 3) {
1915  int old_size = s1->a53_buf_ref ? s1->a53_buf_ref->size : 0;
1916  const uint64_t new_size = (old_size + cc_count
1917  * UINT64_C(3));
1918  int ret;
1919 
1920  if (new_size > 3*A53_MAX_CC_COUNT)
1921  return AVERROR(EINVAL);
1922 
1923  ret = av_buffer_realloc(&s1->a53_buf_ref, new_size);
1924  if (ret >= 0)
1925  memcpy(s1->a53_buf_ref->data + old_size, p + 7, cc_count * UINT64_C(3));
1926 
1927  mpeg_set_cc_format(avctx, CC_FORMAT_A53_PART4, "A/53 Part 4");
1928  }
1929  return 1;
1930  } else if ((!s1->cc_format || s1->cc_format == CC_FORMAT_SCTE20) &&
1931  buf_size >= 2 &&
1932  p[0] == 0x03 && (p[1]&0x7f) == 0x01) {
1933  /* extract SCTE-20 CC data */
1934  GetBitContext gb;
1935  int cc_count = 0;
1936  int i, ret;
1937 
1938  ret = init_get_bits8(&gb, p + 2, buf_size - 2);
1939  if (ret < 0)
1940  return ret;
1941  cc_count = get_bits(&gb, 5);
1942  if (cc_count > 0) {
1943  int old_size = s1->a53_buf_ref ? s1->a53_buf_ref->size : 0;
1944  const uint64_t new_size = (old_size + cc_count
1945  * UINT64_C(3));
1946  if (new_size > 3*A53_MAX_CC_COUNT)
1947  return AVERROR(EINVAL);
1948 
1949  ret = av_buffer_realloc(&s1->a53_buf_ref, new_size);
1950  if (ret >= 0) {
1951  uint8_t field, cc1, cc2;
1952  uint8_t *cap = s1->a53_buf_ref->data + old_size;
1953 
1954  memset(cap, 0, cc_count * 3);
1955  for (i = 0; i < cc_count && get_bits_left(&gb) >= 26; i++) {
1956  skip_bits(&gb, 2); // priority
1957  field = get_bits(&gb, 2);
1958  skip_bits(&gb, 5); // line_offset
1959  cc1 = get_bits(&gb, 8);
1960  cc2 = get_bits(&gb, 8);
1961  skip_bits(&gb, 1); // marker
1962 
1963  if (!field) { // forbidden
1964  cap[0] = cap[1] = cap[2] = 0x00;
1965  } else {
1966  field = (field == 2 ? 1 : 0);
1967  if (!s1->slice.c.top_field_first) field = !field;
1968  cap[0] = 0x04 | field;
1969  cap[1] = ff_reverse[cc1];
1970  cap[2] = ff_reverse[cc2];
1971  }
1972  cap += 3;
1973  }
1974  }
1975 
1976  mpeg_set_cc_format(avctx, CC_FORMAT_SCTE20, "SCTE-20");
1977  }
1978  return 1;
1979  } else if ((!s1->cc_format || s1->cc_format == CC_FORMAT_DVD) &&
1980  buf_size >= 11 &&
1981  p[0] == 'C' && p[1] == 'C' && p[2] == 0x01 && p[3] == 0xf8) {
1982  /* extract DVD CC data
1983  *
1984  * uint32_t user_data_start_code 0x000001B2 (big endian)
1985  * uint16_t user_identifier 0x4343 "CC"
1986  * uint8_t user_data_type_code 0x01
1987  * uint8_t caption_block_size 0xF8
1988  * uint8_t
1989  * bit 7 caption_odd_field_first 1=odd field (CC1/CC2) first 0=even field (CC3/CC4) first
1990  * bit 6 caption_filler 0
1991  * bit 5:1 caption_block_count number of caption blocks (pairs of caption words = frames). Most DVDs use 15 per start of GOP.
1992  * bit 0 caption_extra_field_added 1=one additional caption word
1993  *
1994  * struct caption_field_block {
1995  * uint8_t
1996  * bit 7:1 caption_filler 0x7F (all 1s)
1997  * bit 0 caption_field_odd 1=odd field (this is CC1/CC2) 0=even field (this is CC3/CC4)
1998  * uint8_t caption_first_byte
1999  * uint8_t caption_second_byte
2000  * } caption_block[(caption_block_count * 2) + caption_extra_field_added];
2001  *
2002  * Some DVDs encode caption data for both fields with caption_field_odd=1. The only way to decode the fields
2003  * correctly is to start on the field indicated by caption_odd_field_first and count between odd/even fields.
2004  * Don't assume that the first caption word is the odd field. There do exist MPEG files in the wild that start
2005  * on the even field. There also exist DVDs in the wild that encode an odd field count and the
2006  * caption_extra_field_added/caption_odd_field_first bits change per packet to allow that. */
2007  int cc_count = 0;
2008  int i, ret;
2009  // There is a caption count field in the data, but it is often
2010  // incorrect. So count the number of captions present.
2011  for (i = 5; i + 6 <= buf_size && ((p[i] & 0xfe) == 0xfe); i += 6)
2012  cc_count++;
2013  // Transform the DVD format into A53 Part 4 format
2014  if (cc_count > 0) {
2015  int old_size = s1->a53_buf_ref ? s1->a53_buf_ref->size : 0;
2016  const uint64_t new_size = (old_size + cc_count
2017  * UINT64_C(6));
2018  if (new_size > 3*A53_MAX_CC_COUNT)
2019  return AVERROR(EINVAL);
2020 
2021  ret = av_buffer_realloc(&s1->a53_buf_ref, new_size);
2022  if (ret >= 0) {
2023  uint8_t field1 = !!(p[4] & 0x80);
2024  uint8_t *cap = s1->a53_buf_ref->data + old_size;
2025  p += 5;
2026  for (i = 0; i < cc_count; i++) {
2027  cap[0] = (p[0] == 0xff && field1) ? 0xfc : 0xfd;
2028  cap[1] = p[1];
2029  cap[2] = p[2];
2030  cap[3] = (p[3] == 0xff && !field1) ? 0xfc : 0xfd;
2031  cap[4] = p[4];
2032  cap[5] = p[5];
2033  cap += 6;
2034  p += 6;
2035  }
2036  }
2037 
2038  mpeg_set_cc_format(avctx, CC_FORMAT_DVD, "DVD");
2039  }
2040  return 1;
2041  } else if ((!s1->cc_format || s1->cc_format == CC_FORMAT_DISH) &&
2042  buf_size >= 12 &&
2043  p[0] == 0x05 && p[1] == 0x02) {
2044  /* extract Dish Network CC data */
2045  const uint8_t cc_header = 0xf8 | 0x04 /* valid */ | 0x00 /* line 21 field 1 */;
2046  uint8_t cc_data[4] = {0};
2047  int cc_count = 0;
2048  uint8_t cc_type = p[7];
2049  p += 8;
2050  buf_size -= 8;
2051 
2052  if (cc_type == 0x05 && buf_size >= 7) {
2053  cc_type = p[6];
2054  p += 7;
2055  buf_size -= 7;
2056  }
2057 
2058  if (cc_type == 0x02 && buf_size >= 4) { /* 2-byte caption, can be repeated */
2059  cc_count = 1;
2060  cc_data[0] = p[1];
2061  cc_data[1] = p[2];
2062  cc_type = p[3];
2063 
2064  /* Only repeat characters when the next type flag
2065  * is 0x04 and the characters are repeatable (i.e., less than
2066  * 32 with the parity stripped).
2067  */
2068  if (cc_type == 0x04 && (cc_data[0] & 0x7f) < 32) {
2069  cc_count = 2;
2070  cc_data[2] = cc_data[0];
2071  cc_data[3] = cc_data[1];
2072  }
2073  } else if (cc_type == 0x04 && buf_size >= 5) { /* 4-byte caption, not repeated */
2074  cc_count = 2;
2075  cc_data[0] = p[1];
2076  cc_data[1] = p[2];
2077  cc_data[2] = p[3];
2078  cc_data[3] = p[4];
2079  }
2080 
2081  if (cc_count > 0) {
2082  int ret;
2083  int old_size = s1->a53_buf_ref ? s1->a53_buf_ref->size : 0;
2084  const uint64_t new_size = (old_size + cc_count * UINT64_C(3));
2085  if (new_size > 3 * A53_MAX_CC_COUNT)
2086  return AVERROR(EINVAL);
2087 
2088  ret = av_buffer_realloc(&s1->a53_buf_ref, new_size);
2089  if (ret >= 0) {
2090  uint8_t *cap = s1->a53_buf_ref->data + old_size;
2091  cap[0] = cc_header;
2092  cap[1] = cc_data[0];
2093  cap[2] = cc_data[1];
2094  if (cc_count == 2) {
2095  cap[3] = cc_header;
2096  cap[4] = cc_data[2];
2097  cap[5] = cc_data[3];
2098  }
2099  }
2100 
2101  mpeg_set_cc_format(avctx, CC_FORMAT_DISH, "Dish Network");
2102  }
2103  return 1;
2104  }
2105  return 0;
2106 }
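Whichever caption substream wins above, the bytes accumulate in s1->a53_buf_ref and are later exported on decoded frames as AV_FRAME_DATA_A53_CC side data. A minimal consumer sketch (the helper name dump_a53_cc is made up for the example):

#include <stdio.h>
#include <libavutil/frame.h>

/* sketch: print the cc_data triplets attached to a decoded frame */
static void dump_a53_cc(const AVFrame *frame)
{
    const AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_A53_CC);
    if (!sd)
        return;
    for (size_t i = 0; i + 2 < sd->size; i += 3)  /* cc header byte + two data bytes */
        printf("cc: %02x %02x %02x\n", sd->data[i], sd->data[i + 1], sd->data[i + 2]);
}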
2107 
2108 static void mpeg_decode_user_data(AVCodecContext *avctx,
2109  const uint8_t *p, int buf_size)
2110 {
2111  const uint8_t *buf_end = p + buf_size;
2112  Mpeg1Context *s1 = avctx->priv_data;
2113 
2114 #if 0
2115  int i;
2116  for(i=0; !(!p[i-2] && !p[i-1] && p[i]==1) && i<buf_size; i++){
2117  av_log(avctx, AV_LOG_ERROR, "%c", p[i]);
2118  }
2119  av_log(avctx, AV_LOG_ERROR, "\n");
2120 #endif
2121 
2122  if (buf_size > 29){
2123  int i;
2124  for(i=0; i<20; i++)
2125  if (!memcmp(p+i, "\0TMPGEXS\0", 9)){
2126  s1->tmpgexs = 1;
2127  }
2128  }
2129  /* we parse the DTG active format information */
2130  if (buf_end - p >= 5 &&
2131  p[0] == 'D' && p[1] == 'T' && p[2] == 'G' && p[3] == '1') {
2132  int flags = p[4];
2133  p += 5;
2134  if (flags & 0x80) {
2135  /* skip event id */
2136  p += 2;
2137  }
2138  if (flags & 0x40) {
2139  if (buf_end - p < 1)
2140  return;
2141  s1->has_afd = 1;
2142  s1->afd = p[0] & 0x0f;
2143  }
2144  } else if (buf_end - p >= 6 &&
2145  p[0] == 'J' && p[1] == 'P' && p[2] == '3' && p[3] == 'D' &&
2146  p[4] == 0x03) { // S3D_video_format_length
2147  // the 0x7F mask ignores the reserved_bit value
2148  const uint8_t S3D_video_format_type = p[5] & 0x7F;
2149 
2150  if (S3D_video_format_type == 0x03 ||
2151  S3D_video_format_type == 0x04 ||
2152  S3D_video_format_type == 0x08 ||
2153  S3D_video_format_type == 0x23) {
2154 
2155  s1->has_stereo3d = 1;
2156 
2157  switch (S3D_video_format_type) {
2158  case 0x03:
2159  s1->stereo3d_type = AV_STEREO3D_SIDEBYSIDE;
2160  break;
2161  case 0x04:
2162  s1->stereo3d_type = AV_STEREO3D_TOPBOTTOM;
2163  break;
2164  case 0x08:
2165  s1->stereo3d_type = AV_STEREO3D_2D;
2166  break;
2167  case 0x23:
2168  s1->stereo3d_type = AV_STEREO3D_SIDEBYSIDE_QUINCUNX;
2169  break;
2170  }
2171  }
2172  } else if (mpeg_decode_a53_cc(avctx, p, buf_size)) {
2173  return;
2174  }
2175 }
2176 
2177 static int mpeg_decode_gop(AVCodecContext *avctx,
2178  const uint8_t *buf, int buf_size)
2179 {
2180  Mpeg1Context *s1 = avctx->priv_data;
2181  MPVContext *const s = &s1->slice.c;
2182  GetBitContext gb0, *const gb = &gb0;
2183  int broken_link;
2184  int64_t tc;
2185 
2186  int ret = init_get_bits8(gb, buf, buf_size);
2187  if (ret < 0)
2188  return ret;
2189 
2190  tc = s1->timecode_frame_start = get_bits(gb, 25);
2191 
2192  s1->closed_gop = get_bits1(gb);
2193  /* broken_link indicates that after editing the
2194  * reference frames of the first B-Frames after GOP I-Frame
2195  * are missing (open gop) */
2196  broken_link = get_bits1(gb);
2197 
2198  if (s->avctx->debug & FF_DEBUG_PICT_INFO) {
2199  char tcbuf[AV_TIMECODE_STR_SIZE];
2200  av_timecode_make_mpeg_tc_string(tcbuf, tc);
2201  av_log(s->avctx, AV_LOG_DEBUG,
2202  "GOP (%s) closed_gop=%d broken_link=%d\n",
2203  tcbuf, s1->closed_gop, broken_link);
2204  }
2205 
2206  return 0;
2207 }
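The 25 bits stored in timecode_frame_start follow the GOP header time_code layout of ISO/IEC 11172-2: a drop-frame flag, 5 bits of hours, 6 bits of minutes, a marker bit, 6 bits of seconds and 6 bits of pictures. av_timecode_make_mpeg_tc_string() formats that value directly; a hand-rolled unpacking, shown only for illustration:

#include <stdint.h>

/* sketch: split a 25-bit GOP time_code into its fields */
static void split_gop_timecode(uint32_t tc, int *drop, int *hh, int *mm, int *ss, int *pics)
{
    *drop = (tc >> 24) & 0x01;  /* drop_frame_flag */
    *hh   = (tc >> 19) & 0x1f;  /* time_code_hours */
    *mm   = (tc >> 13) & 0x3f;  /* time_code_minutes */
    /* bit 12 is the marker_bit */
    *ss   = (tc >>  6) & 0x3f;  /* time_code_seconds */
    *pics =  tc        & 0x3f;  /* time_code_pictures */
}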
2208 
2209 static void mpeg12_execute_slice_threads(AVCodecContext *avctx,
2210  Mpeg1Context *const s)
2211 {
2212  if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_SLICE) &&
2213  !avctx->hwaccel) {
2214  MPVContext *const s2 = &s->slice.c;
2215  int error_count = 0;
2216 
2217  avctx->execute(avctx, slice_decode_thread,
2218  s2->mpeg12_contexts, NULL,
2219  s->slice_count, sizeof(s2->mpeg12_contexts[0]));
2220 
2221  for (int i = 0; i < s->slice_count; i++) {
2222  MpegEncContext *const slice = s2->thread_context[i];
2223  int slice_err = atomic_load_explicit(&slice->er.error_count,
2224  memory_order_relaxed);
2225  // error_count can get set to INT_MAX on serious errors.
2226  // So use saturated addition.
2227  if ((unsigned)slice_err > INT_MAX - error_count) {
2228  error_count = INT_MAX;
2229  break;
2230  }
2231  error_count += slice_err;
2232  }
2233  atomic_store_explicit(&s2->er.error_count, error_count,
2234  memory_order_relaxed);
2235  }
2236 }
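As the comment above notes, the per-slice error counts are merged with a saturating add so that an INT_MAX sentinel from one slice cannot overflow the total. Reduced to its essence (illustrative helper, not decoder code):

#include <limits.h>

/* sketch: accumulate an error count, clamping at INT_MAX */
static int add_saturated(int total, unsigned slice_err)
{
    if (slice_err > (unsigned)(INT_MAX - total))
        return INT_MAX;
    return total + (int)slice_err;
}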
2237 
2238 static int decode_chunks(AVCodecContext *avctx, AVFrame *picture,
2239  int *got_output, const uint8_t *buf, int buf_size)
2240 {
2241  Mpeg1Context *s = avctx->priv_data;
2242  MPVContext *const s2 = &s->slice.c;
2243  const uint8_t *buf_ptr = buf;
2244  const uint8_t *buf_end = buf + buf_size;
2245  int ret, input_size;
2246  int last_code = 0, skip_frame = 0;
2247  int picture_start_code_seen = 0;
2248 
2249  for (;;) {
2250  /* find next start code */
2251  uint32_t start_code = -1;
2252  buf_ptr = avpriv_find_start_code(buf_ptr, buf_end, &start_code);
2253  if (start_code > 0x1ff) {
2254  if (!skip_frame) {
2255  mpeg12_execute_slice_threads(avctx, s);
2256 
2257  ret = slice_end(avctx, picture, got_output);
2258  if (ret < 0)
2259  return ret;
2260  }
2261  s2->pict_type = 0;
2262 
2263  if (avctx->err_recognition & AV_EF_EXPLODE && s2->er.error_count)
2264  return AVERROR_INVALIDDATA;
2265 
2266  return FFMAX(0, buf_ptr - buf);
2267  }
2268 
2269  input_size = buf_end - buf_ptr;
2270 
2271  if (avctx->debug & FF_DEBUG_STARTCODE)
2272  av_log(avctx, AV_LOG_DEBUG, "%3"PRIX32" at %"PTRDIFF_SPECIFIER" left %d\n",
2273  start_code, buf_ptr - buf, input_size);
2274 
2275  /* prepare data for next start code */
2276  switch (start_code) {
2277  case SEQ_START_CODE:
2278  if (last_code == 0) {
2279  mpeg1_decode_sequence(avctx, buf_ptr, input_size);
2280  if (buf != avctx->extradata)
2281  s->sync = 1;
2282  } else {
2283  av_log(avctx, AV_LOG_ERROR,
2284  "ignoring SEQ_START_CODE after %X\n", last_code);
2285  if (avctx->err_recognition & AV_EF_EXPLODE)
2286  return AVERROR_INVALIDDATA;
2287  }
2288  break;
2289 
2290  case PICTURE_START_CODE:
2291  if (picture_start_code_seen && s2->picture_structure == PICT_FRAME) {
2292  /* If it's a frame picture, there can't be more than one picture header.
2293  Yet, it does happen and we need to handle it. */
2294  av_log(avctx, AV_LOG_WARNING, "ignoring extra picture following a frame-picture\n");
2295  break;
2296  }
2297  picture_start_code_seen = 1;
2298 
2299  if (buf == avctx->extradata && avctx->codec_tag == AV_RL32("AVmp")) {
2300  av_log(avctx, AV_LOG_WARNING, "ignoring picture start code in AVmp extradata\n");
2301  break;
2302  }
2303 
2304  if (s2->width <= 0 || s2->height <= 0) {
2305  av_log(avctx, AV_LOG_ERROR, "Invalid frame dimensions %dx%d.\n",
2306  s2->width, s2->height);
2307  return AVERROR_INVALIDDATA;
2308  }
2309 
2310  if (s->tmpgexs){
2311  s2->intra_dc_precision= 3;
2312  s2->intra_matrix[0]= 1;
2313  }
2314  if (s->slice_count) {
2315  mpeg12_execute_slice_threads(avctx, s);
2316  s->slice_count = 0;
2317  }
2318  if (last_code == 0 || last_code == SLICE_MIN_START_CODE) {
2319  ret = mpeg_decode_postinit(avctx);
2320  if (ret < 0) {
2321  av_log(avctx, AV_LOG_ERROR,
2322  "mpeg_decode_postinit() failure\n");
2323  return ret;
2324  }
2325 
2326  /* We have a complete image: we try to decompress it. */
2327  if (mpeg1_decode_picture(avctx, buf_ptr, input_size) < 0)
2328  s2->pict_type = 0;
2329  s->first_slice = 1;
2330  last_code = PICTURE_START_CODE;
2331  } else {
2332  av_log(avctx, AV_LOG_ERROR,
2333  "ignoring pic after %X\n", last_code);
2334  if (avctx->err_recognition & AV_EF_EXPLODE)
2335  return AVERROR_INVALIDDATA;
2336  }
2337  break;
2338  case EXT_START_CODE: {
2339  GetBitContext gb0, *const gb = &gb0;
2340 
2341  ret = init_get_bits8(gb, buf_ptr, input_size);
2342  if (ret < 0)
2343  return ret;
2344 
2345  switch (get_bits(gb, 4)) {
2346  case 0x1:
2347  if (last_code == 0) {
2348  mpeg_decode_sequence_extension(s, gb);
2349  } else {
2350  av_log(avctx, AV_LOG_ERROR,
2351  "ignoring seq ext after %X\n", last_code);
2352  if (avctx->err_recognition & AV_EF_EXPLODE)
2353  return AVERROR_INVALIDDATA;
2354  }
2355  break;
2356  case 0x2:
2357  mpeg_decode_sequence_display_extension(s, gb);
2358  break;
2359  case 0x3:
2360  mpeg_decode_quant_matrix_extension(s2, gb);
2361  break;
2362  case 0x7:
2363  mpeg_decode_picture_display_extension(s, gb);
2364  break;
2365  case 0x8:
2366  if (last_code == PICTURE_START_CODE) {
2367  ret = mpeg_decode_picture_coding_extension(s, gb);
2368  if (ret < 0)
2369  return ret;
2370  } else {
2371  av_log(avctx, AV_LOG_ERROR,
2372  "ignoring pic cod ext after %X\n", last_code);
2373  if (avctx->err_recognition & AV_EF_EXPLODE)
2374  return AVERROR_INVALIDDATA;
2375  }
2376  break;
2377  }
2378  break;
2379  }
2380  case USER_START_CODE:
2381  mpeg_decode_user_data(avctx, buf_ptr, input_size);
2382  break;
2383  case GOP_START_CODE:
2384  if (last_code == 0) {
2385  s2->first_field = 0;
2386  ret = mpeg_decode_gop(avctx, buf_ptr, input_size);
2387  if (ret < 0)
2388  return ret;
2389  s->sync = 1;
2390  } else {
2391  av_log(avctx, AV_LOG_ERROR,
2392  "ignoring GOP_START_CODE after %X\n", last_code);
2393  if (avctx->err_recognition & AV_EF_EXPLODE)
2394  return AVERROR_INVALIDDATA;
2395  }
2396  break;
2397  default:
2398  if (start_code >= SLICE_MIN_START_CODE &&
2399  start_code <= SLICE_MAX_START_CODE && last_code == PICTURE_START_CODE) {
2400  if (s2->progressive_sequence && !s2->progressive_frame) {
2401  s2->progressive_frame = 1;
2402  av_log(s2->avctx, AV_LOG_ERROR,
2403  "interlaced frame in progressive sequence, ignoring\n");
2404  }
2405 
2406  if (s2->picture_structure == 0 ||
2407  (s2->progressive_frame && s2->picture_structure != PICT_FRAME)) {
2408  av_log(s2->avctx, AV_LOG_ERROR,
2409  "picture_structure %d invalid, ignoring\n",
2410  s2->picture_structure);
2411  s2->picture_structure = PICT_FRAME;
2412  }
2413 
2414  if (s2->progressive_sequence && !s2->frame_pred_frame_dct)
2415  av_log(s2->avctx, AV_LOG_WARNING, "invalid frame_pred_frame_dct\n");
2416 
2417  if (s2->picture_structure == PICT_FRAME) {
2418  s2->first_field = 0;
2419  s2->v_edge_pos = 16 * s2->mb_height;
2420  } else {
2421  s2->first_field ^= 1;
2422  s2->v_edge_pos = 8 * s2->mb_height;
2423  memset(s2->mbskip_table, 0, s2->mb_stride * s2->mb_height);
2424  }
2425  }
2426  if (start_code >= SLICE_MIN_START_CODE &&
2427  start_code <= SLICE_MAX_START_CODE && last_code != 0) {
2428  const int field_pic = s2->picture_structure != PICT_FRAME;
2429  int mb_y = start_code - SLICE_MIN_START_CODE;
2430  last_code = SLICE_MIN_START_CODE;
2431  if (s2->codec_id != AV_CODEC_ID_MPEG1VIDEO && s2->mb_height > 2800/16)
2432  mb_y += (*buf_ptr&0xE0)<<2;
2433 
2434  mb_y <<= field_pic;
2435  if (s2->picture_structure == PICT_BOTTOM_FIELD)
2436  mb_y++;
2437 
2438  if (buf_end - buf_ptr < 2) {
2439  av_log(s2->avctx, AV_LOG_ERROR, "slice too small\n");
2440  return AVERROR_INVALIDDATA;
2441  }
2442 
2443  if (mb_y >= s2->mb_height) {
2444  av_log(s2->avctx, AV_LOG_ERROR,
2445  "slice below image (%d >= %d)\n", mb_y, s2->mb_height);
2446  return AVERROR_INVALIDDATA;
2447  }
2448 
2449  if (!s2->last_pic.ptr) {
2450  /* Skip B-frames if we do not have reference frames and
2451  * GOP is not closed. */
2452  if (s2->pict_type == AV_PICTURE_TYPE_B) {
2453  if (!s->closed_gop) {
2454  skip_frame = 1;
2455  av_log(s2->avctx, AV_LOG_DEBUG,
2456  "Skipping B slice due to open GOP\n");
2457  break;
2458  }
2459  }
2460  }
2462  s->sync = 1;
2463  if (!s2->next_pic.ptr) {
2464  /* Skip P-frames if we do not have a reference frame or
2465  * we have an invalid header. */
2466  if (s2->pict_type == AV_PICTURE_TYPE_P && !s->sync) {
2467  skip_frame = 1;
2468  av_log(s2->avctx, AV_LOG_DEBUG,
2469  "Skipping P slice due to !sync\n");
2470  break;
2471  }
2472  }
2473  if ((avctx->skip_frame >= AVDISCARD_NONREF &&
2474  s2->pict_type == AV_PICTURE_TYPE_B) ||
2475  (avctx->skip_frame >= AVDISCARD_NONKEY &&
2476  s2->pict_type != AV_PICTURE_TYPE_I) ||
2477  avctx->skip_frame >= AVDISCARD_ALL) {
2478  skip_frame = 1;
2479  break;
2480  }
2481 
2482  if (!s2->context_initialized)
2483  break;
2484 
2485  if (s2->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
2486  if (mb_y < avctx->skip_top ||
2487  mb_y >= s2->mb_height - avctx->skip_bottom)
2488  break;
2489  }
2490 
2491  if (!s2->pict_type) {
2492  av_log(avctx, AV_LOG_ERROR, "Missing picture start code\n");
2493  if (avctx->err_recognition & AV_EF_EXPLODE)
2494  return AVERROR_INVALIDDATA;
2495  break;
2496  }
2497 
2498  if (s->first_slice) {
2499  skip_frame = 0;
2500  s->first_slice = 0;
2501  if ((ret = mpeg_field_start(s, buf, buf_size)) < 0)
2502  return ret;
2503  }
2504  if (!s2->cur_pic.ptr) {
2505  av_log(avctx, AV_LOG_ERROR,
2506  "current_picture not initialized\n");
2507  return AVERROR_INVALIDDATA;
2508  }
2509 
2510  if (HAVE_THREADS &&
2511  (avctx->active_thread_type & FF_THREAD_SLICE) &&
2512  !avctx->hwaccel) {
2513  int threshold = (s2->mb_height * s->slice_count +
2514  s2->slice_context_count / 2) /
2515  s2->slice_context_count;
2516  if (threshold <= mb_y) {
2517  Mpeg12SliceContext *const thread_context = s2->mpeg12_contexts[s->slice_count];
2518 
2519  thread_context->c.start_mb_y = mb_y;
2520  thread_context->c.end_mb_y = s2->mb_height;
2521  if (s->slice_count) {
2522  s2->thread_context[s->slice_count - 1]->end_mb_y = mb_y;
2523  ret = ff_update_duplicate_context(&thread_context->c, s2);
2524  if (ret < 0)
2525  return ret;
2526  }
2527  ret = init_get_bits8(&thread_context->gb, buf_ptr, input_size);
2528  if (ret < 0)
2529  return ret;
2530  s->slice_count++;
2531  }
2532  buf_ptr += 2; // FIXME add minimum number of bytes per slice
2533  } else {
2534  ret = mpeg_decode_slice(&s->slice, mb_y, &buf_ptr, input_size);
2535  emms_c();
2536 
2537  if (ret < 0) {
2538  if (avctx->err_recognition & AV_EF_EXPLODE)
2539  return ret;
2540  if (s2->resync_mb_x >= 0 && s2->resync_mb_y >= 0)
2541  ff_er_add_slice(&s2->er, s2->resync_mb_x,
2542  s2->resync_mb_y, s2->mb_x, s2->mb_y,
2543  ER_AC_ERROR | ER_DC_ERROR | ER_MV_ERROR);
2544  } else {
2545  ff_er_add_slice(&s2->er, s2->resync_mb_x,
2546  s2->resync_mb_y, s2->mb_x - 1, s2->mb_y,
2547  ER_AC_END | ER_DC_END | ER_MV_END);
2548  }
2549  }
2550  }
2551  break;
2552  }
2553  }
2554 }
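decode_chunks() is driven entirely by start codes: avpriv_find_start_code() keeps a rolling 32-bit state and returns once the last four bytes read form 00 00 01 xx. A simplified stand-in (the real FFmpeg routine is optimized; this is only a behavioural sketch):

#include <stdint.h>

/* sketch: return a pointer just past the next 00 00 01 xx sequence,
 * leaving the start code value (0x000001xx) in *state */
static const uint8_t *find_start_code_sketch(const uint8_t *p, const uint8_t *end,
                                             uint32_t *state)
{
    while (p < end) {
        *state = (*state << 8) | *p++;
        if ((*state & 0xffffff00) == 0x100)
            return p;
    }
    return end;
}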
2555 
2556 static int mpeg_decode_frame(AVCodecContext *avctx, AVFrame *picture,
2557  int *got_output, AVPacket *avpkt)
2558 {
2559  const uint8_t *buf = avpkt->data;
2560  int ret;
2561  int buf_size = avpkt->size;
2562  Mpeg1Context *s = avctx->priv_data;
2563  MPVContext *const s2 = &s->slice.c;
2564 
2565  if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == SEQ_END_CODE)) {
2566  /* special case for last picture */
2567  if (s2->low_delay == 0 && s2->next_pic.ptr) {
2568  int ret = av_frame_ref(picture, s2->next_pic.ptr->f);
2569  if (ret < 0)
2570  return ret;
2571 
2572  ff_mpv_unref_picture(&s2->next_pic);
2573 
2574  *got_output = 1;
2575  }
2576  return buf_size;
2577  }
2578 
2579  if (!s2->context_initialized &&
2580  (s2->codec_tag == AV_RL32("VCR2") || s2->codec_tag == AV_RL32("BW10")))
2581  vcr2_init_sequence(avctx);
2582 
2583  s->slice_count = 0;
2584 
2585  if (avctx->extradata && !s->extradata_decoded) {
2586  ret = decode_chunks(avctx, picture, got_output,
2587  avctx->extradata, avctx->extradata_size);
2588  if (*got_output) {
2589  av_log(avctx, AV_LOG_ERROR, "picture in extradata\n");
2590  av_frame_unref(picture);
2591  *got_output = 0;
2592  }
2593  s->extradata_decoded = 1;
2594  if (ret < 0 && (avctx->err_recognition & AV_EF_EXPLODE)) {
2595  s2->cur_pic.ptr = NULL;
2596  return ret;
2597  }
2598  }
2599 
2600  ret = decode_chunks(avctx, picture, got_output, buf, buf_size);
2601  if (ret<0 || *got_output) {
2602  ff_mpv_unref_picture(&s2->cur_pic);
2603 
2604  if (s->timecode_frame_start != -1 && *got_output) {
2605  char tcbuf[AV_TIMECODE_STR_SIZE];
2606  AVFrameSideData *tcside = av_frame_new_side_data(picture,
2607  AV_FRAME_DATA_GOP_TIMECODE,
2608  sizeof(int64_t));
2609  if (!tcside)
2610  return AVERROR(ENOMEM);
2611  memcpy(tcside->data, &s->timecode_frame_start, sizeof(int64_t));
2612 
2613  av_timecode_make_mpeg_tc_string(tcbuf, s->timecode_frame_start);
2614  av_dict_set(&picture->metadata, "timecode", tcbuf, 0);
2615 
2616  s->timecode_frame_start = -1;
2617  }
2618  }
2619 
2620  return ret;
2621 }
2622 
2623 static av_cold void flush(AVCodecContext *avctx)
2624 {
2625  Mpeg1Context *s = avctx->priv_data;
2626 
2627  s->sync = 0;
2628  s->closed_gop = 0;
2629 
2630  av_buffer_unref(&s->a53_buf_ref);
2631  ff_mpeg_flush(avctx);
2632 }
2633 
2635 {
2636  Mpeg1Context *s = avctx->priv_data;
2637 
2638  av_buffer_unref(&s->a53_buf_ref);
2639  return ff_mpv_decode_close(avctx);
2640 }
2641 
2642 const FFCodec ff_mpeg1video_decoder = {
2643  .p.name = "mpeg1video",
2644  CODEC_LONG_NAME("MPEG-1 video"),
2645  .p.type = AVMEDIA_TYPE_VIDEO,
2646  .p.id = AV_CODEC_ID_MPEG1VIDEO,
2647  .priv_data_size = sizeof(Mpeg1Context),
2651  .p.capabilities = AV_CODEC_CAP_DRAW_HORIZ_BAND | AV_CODEC_CAP_DR1 |
2652  AV_CODEC_CAP_DELAY | AV_CODEC_CAP_SLICE_THREADS,
2653  .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM,
2654  .flush = flush,
2655  .p.max_lowres = 3,
2656  .hw_configs = (const AVCodecHWConfigInternal *const []) {
2657 #if CONFIG_MPEG1_NVDEC_HWACCEL
2658  HWACCEL_NVDEC(mpeg1),
2659 #endif
2660 #if CONFIG_MPEG1_VDPAU_HWACCEL
2661  HWACCEL_VDPAU(mpeg1),
2662 #endif
2663 #if CONFIG_MPEG1_VIDEOTOOLBOX_HWACCEL
2664  HWACCEL_VIDEOTOOLBOX(mpeg1),
2665 #endif
2666  NULL
2667  },
2668 };
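A hedged sketch of driving this decoder through the public send/receive API; the helper name drain_packet is invented for the example and real code needs fuller error handling:

#include <libavcodec/avcodec.h>

/* sketch: push one packet and pull every frame it produces */
static int drain_packet(AVCodecContext *dec, const AVPacket *pkt, AVFrame *frame)
{
    int ret = avcodec_send_packet(dec, pkt);
    if (ret < 0)
        return ret;
    while ((ret = avcodec_receive_frame(dec, frame)) >= 0)
        av_frame_unref(frame);  /* consume the picture here */
    return (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) ? 0 : ret;
}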
2669 
2670 #define M2V_OFFSET(x) offsetof(Mpeg1Context, x)
2671 #define M2V_PARAM AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM
2672 
2673 static const AVOption mpeg2video_options[] = {
2674  { "cc_format", "extract a specific Closed Captions format",
2675  M2V_OFFSET(cc_format), AV_OPT_TYPE_INT, { .i64 = CC_FORMAT_AUTO },
2676  CC_FORMAT_AUTO, CC_FORMAT_DISH, M2V_PARAM, .unit = "cc_format" },
2677 
2678  { "auto", "pick first seen CC substream", 0, AV_OPT_TYPE_CONST,
2679  { .i64 = CC_FORMAT_AUTO }, .flags = M2V_PARAM, .unit = "cc_format" },
2680  { "a53", "pick A/53 Part 4 CC substream", 0, AV_OPT_TYPE_CONST,
2681  { .i64 = CC_FORMAT_A53_PART4 }, .flags = M2V_PARAM, .unit = "cc_format" },
2682  { "scte20", "pick SCTE-20 CC substream", 0, AV_OPT_TYPE_CONST,
2683  { .i64 = CC_FORMAT_SCTE20 }, .flags = M2V_PARAM, .unit = "cc_format" },
2684  { "dvd", "pick DVD CC substream", 0, AV_OPT_TYPE_CONST,
2685  { .i64 = CC_FORMAT_DVD }, .flags = M2V_PARAM, .unit = "cc_format" },
2686  { "dish", "pick Dish Network CC substream", 0, AV_OPT_TYPE_CONST,
2687  { .i64 = CC_FORMAT_DISH }, .flags = M2V_PARAM, .unit = "cc_format" },
2688  { NULL }
2689 };
2690 
2691 static const AVClass mpeg2video_class = {
2692  .class_name = "MPEG-2 video",
2693  .item_name = av_default_item_name,
2694  .option = mpeg2video_options,
2695  .version = LIBAVUTIL_VERSION_INT,
2696  .category = AV_CLASS_CATEGORY_DECODER,
2697 };
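Because cc_format is a decoder-private AVOption, it can be passed as an options dictionary when the codec is opened. A usage sketch (the helper name is hypothetical):

#include <libavcodec/avcodec.h>
#include <libavutil/dict.h>

/* sketch: open an MPEG-2 decoder that only extracts A/53 Part 4 captions */
static int open_mpeg2_a53_only(AVCodecContext **pctx)
{
    const AVCodec *dec = avcodec_find_decoder(AV_CODEC_ID_MPEG2VIDEO);
    AVDictionary *opts = NULL;
    int ret;

    if (!dec)
        return AVERROR_DECODER_NOT_FOUND;
    *pctx = avcodec_alloc_context3(dec);
    if (!*pctx)
        return AVERROR(ENOMEM);
    av_dict_set(&opts, "cc_format", "a53", 0);
    ret = avcodec_open2(*pctx, dec, &opts);
    av_dict_free(&opts);
    return ret;
}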
2698 
2699 const FFCodec ff_mpeg2video_decoder = {
2700  .p.name = "mpeg2video",
2701  CODEC_LONG_NAME("MPEG-2 video"),
2702  .p.type = AVMEDIA_TYPE_VIDEO,
2703  .p.id = AV_CODEC_ID_MPEG2VIDEO,
2704  .p.priv_class = &mpeg2video_class,
2705  .priv_data_size = sizeof(Mpeg1Context),
2709  .p.capabilities = AV_CODEC_CAP_DRAW_HORIZ_BAND | AV_CODEC_CAP_DR1 |
2710  AV_CODEC_CAP_DELAY | AV_CODEC_CAP_SLICE_THREADS,
2711  .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM,
2712  .flush = flush,
2713  .p.max_lowres = 3,
2714  .p.profiles = NULL_IF_CONFIG_SMALL(ff_mpeg2_video_profiles),
2715  .hw_configs = (const AVCodecHWConfigInternal *const []) {
2716 #if CONFIG_MPEG2_DXVA2_HWACCEL
2717  HWACCEL_DXVA2(mpeg2),
2718 #endif
2719 #if CONFIG_MPEG2_D3D11VA_HWACCEL
2720  HWACCEL_D3D11VA(mpeg2),
2721 #endif
2722 #if CONFIG_MPEG2_D3D11VA2_HWACCEL
2723  HWACCEL_D3D11VA2(mpeg2),
2724 #endif
2725 #if CONFIG_MPEG2_D3D12VA_HWACCEL
2726  HWACCEL_D3D12VA(mpeg2),
2727 #endif
2728 #if CONFIG_MPEG2_NVDEC_HWACCEL
2729  HWACCEL_NVDEC(mpeg2),
2730 #endif
2731 #if CONFIG_MPEG2_VAAPI_HWACCEL
2732  HWACCEL_VAAPI(mpeg2),
2733 #endif
2734 #if CONFIG_MPEG2_VDPAU_HWACCEL
2735  HWACCEL_VDPAU(mpeg2),
2736 #endif
2737 #if CONFIG_MPEG2_VIDEOTOOLBOX_HWACCEL
2738  HWACCEL_VIDEOTOOLBOX(mpeg2),
2739 #endif
2740  NULL
2741  },
2742 };
2743 
2744 //legacy decoder
2745 const FFCodec ff_mpegvideo_decoder = {
2746  .p.name = "mpegvideo",
2747  CODEC_LONG_NAME("MPEG-1 video"),
2748  .p.type = AVMEDIA_TYPE_VIDEO,
2749  .p.id = AV_CODEC_ID_MPEG2VIDEO,
2750  .priv_data_size = sizeof(Mpeg1Context),
2754  .p.capabilities = AV_CODEC_CAP_DRAW_HORIZ_BAND | AV_CODEC_CAP_DR1 |
2755  AV_CODEC_CAP_DELAY | AV_CODEC_CAP_SLICE_THREADS,
2756  .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM,
2757  .flush = flush,
2758  .p.max_lowres = 3,
2759 };
2760 
2761 typedef struct IPUContext {
2762  Mpeg12SliceContext m;
2763 
2764  int flags;
2765 } IPUContext;
2766 
2767 static int ipu_decode_frame(AVCodecContext *avctx, AVFrame *frame,
2768  int *got_frame, AVPacket *avpkt)
2769 {
2770  IPUContext *s = avctx->priv_data;
2771  MPVContext *const m = &s->m.c;
2772  GetBitContext *const gb = &s->m.gb;
2773  int16_t (*const block)[64] = s->m.block;
2774  int ret;
2775 
2776  // Check for minimal intra MB size (considering mb header, luma & chroma dc VLC, ac EOB VLC)
2777  if (avpkt->size*8LL < (avctx->width+15)/16 * ((avctx->height+15)/16) * (2LL + 3*4 + 2*2 + 2*6))
2778  return AVERROR_INVALIDDATA;
2779 
2780  ret = ff_get_buffer(avctx, frame, 0);
2781  if (ret < 0)
2782  return ret;
2783 
2784  ret = init_get_bits8(gb, avpkt->data, avpkt->size);
2785  if (ret < 0)
2786  return ret;
2787 
2788  s->flags = get_bits(gb, 8);
2789  m->intra_dc_precision = s->flags & 3;
2790  m->q_scale_type = !!(s->flags & 0x40);
2791  m->intra_vlc_format = !!(s->flags & 0x20);
2792  m->alternate_scan = !!(s->flags & 0x10);
2793 
2794  ff_permute_scantable(m->intra_scantable.permutated,
2795  s->flags & 0x10 ? ff_alternate_vertical_scan : ff_zigzag_direct,
2796  m->idsp.idct_permutation);
2797 
2798  s->m.last_dc[0] = s->m.last_dc[1] = s->m.last_dc[2] = 128 << (s->flags & 3);
2799  m->qscale = 1;
2800 
2801  for (int y = 0; y < avctx->height; y += 16) {
2802  int intraquant;
2803 
2804  for (int x = 0; x < avctx->width; x += 16) {
2805  if (x || y) {
2806  if (!get_bits1(gb))
2807  return AVERROR_INVALIDDATA;
2808  }
2809  if (get_bits1(gb)) {
2810  intraquant = 0;
2811  } else {
2812  if (!get_bits1(gb))
2813  return AVERROR_INVALIDDATA;
2814  intraquant = 1;
2815  }
2816 
2817  if (s->flags & 4)
2818  skip_bits1(gb);
2819 
2820  if (intraquant)
2821  m->qscale = mpeg_get_qscale(gb, m->q_scale_type);
2822 
2823  memset(block, 0, 6 * sizeof(*block));
2824 
2825  for (int n = 0; n < 6; n++) {
2826  if (s->flags & 0x80) {
2827  ret = ff_mpeg1_decode_block_intra(gb,
2828  m->intra_matrix,
2829  m->intra_scantable.permutated,
2830  s->m.last_dc, block[n],
2831  n, m->qscale);
2832  } else {
2833  ret = mpeg2_decode_block_intra(&s->m, block[n], n);
2834  }
2835 
2836  if (ret < 0)
2837  return ret;
2838  }
2839 
2840  m->idsp.idct_put(frame->data[0] + y * frame->linesize[0] + x,
2841  frame->linesize[0], block[0]);
2842  m->idsp.idct_put(frame->data[0] + y * frame->linesize[0] + x + 8,
2843  frame->linesize[0], block[1]);
2844  m->idsp.idct_put(frame->data[0] + (y + 8) * frame->linesize[0] + x,
2845  frame->linesize[0], block[2]);
2846  m->idsp.idct_put(frame->data[0] + (y + 8) * frame->linesize[0] + x + 8,
2847  frame->linesize[0], block[3]);
2848  m->idsp.idct_put(frame->data[1] + (y >> 1) * frame->linesize[1] + (x >> 1),
2849  frame->linesize[1], block[4]);
2850  m->idsp.idct_put(frame->data[2] + (y >> 1) * frame->linesize[2] + (x >> 1),
2851  frame->linesize[2], block[5]);
2852  }
2853  }
2854 
2855  align_get_bits(gb);
2856  if (get_bits_left(gb) != 32)
2857  return AVERROR_INVALIDDATA;
2858 
2859  *got_frame = 1;
2860 
2861  return avpkt->size;
2862 }
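The size check at the top of ipu_decode_frame() assumes each 16x16 macroblock costs at least 2 + 3*4 + 2*2 + 2*6 = 30 bits. Making that arithmetic concrete for a hypothetical 720x480 frame (dimensions chosen only for illustration):

#include <stdio.h>

int main(void)
{
    int w = 720, h = 480;                                 /* hypothetical frame size */
    long long mbs  = ((w + 15) / 16) * (long long)((h + 15) / 16);
    long long bits = mbs * (2 + 3 * 4 + 2 * 2 + 2 * 6);   /* 30 bits per MB */
    printf("%lld MBs -> packets smaller than %lld bytes are rejected\n",
           mbs, (bits + 7) / 8);
    return 0;
}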
2863 
2864 static av_cold int ipu_decode_init(AVCodecContext *avctx)
2865 {
2866  IPUContext *s = avctx->priv_data;
2867  MPVContext *const m = &s->m.c;
2868 
2869  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
2870  m->avctx = avctx;
2871 
2872  ff_idctdsp_init(&m->idsp, avctx);
2874 
2875  for (int i = 0; i < 64; i++) {
2876  int j = m->idsp.idct_permutation[i];
2877  int v = ff_mpeg1_default_intra_matrix[i];
2878  m->intra_matrix[j] = v;
2879  m->chroma_intra_matrix[j] = v;
2880  }
2881 
2882  return 0;
2883 }
2884 
2885 const FFCodec ff_ipu_decoder = {
2886  .p.name = "ipu",
2887  CODEC_LONG_NAME("IPU Video"),
2888  .p.type = AVMEDIA_TYPE_VIDEO,
2889  .p.id = AV_CODEC_ID_IPU,
2890  .priv_data_size = sizeof(IPUContext),
2891  .init = ipu_decode_init,
2892  FF_CODEC_DECODE_CB(ipu_decode_frame),
2893  .p.capabilities = AV_CODEC_CAP_DR1,
2894 };
PICT_FRAME
#define PICT_FRAME
Definition: mpegutils.h:33
vcr2_init_sequence
static int vcr2_init_sequence(AVCodecContext *avctx)
Definition: mpeg12dec.c:1833
flags
const SwsFlags flags[]
Definition: swscale.c:61
HWACCEL_D3D12VA
#define HWACCEL_D3D12VA(codec)
Definition: hwconfig.h:80
ff_mpv_common_init
av_cold int ff_mpv_common_init(MpegEncContext *s)
init common structure for both encoder and decoder.
Definition: mpegvideo.c:378
hwconfig.h
AVCodecContext::hwaccel
const struct AVHWAccel * hwaccel
Hardware accelerator in use.
Definition: avcodec.h:1405
FF_ENABLE_DEPRECATION_WARNINGS
#define FF_ENABLE_DEPRECATION_WARNINGS
Definition: internal.h:73
MV_TYPE_16X16
#define MV_TYPE_16X16
1 vector for the whole mb
Definition: mpegvideo.h:174
Mpeg1Context::slice
Mpeg12SliceContext slice
Definition: mpeg12dec.c:82
Mpeg1Context::has_afd
int has_afd
Definition: mpeg12dec.c:89
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:216
AV_TIMECODE_STR_SIZE
#define AV_TIMECODE_STR_SIZE
Definition: timecode.h:33
AV_PIX_FMT_CUDA
@ AV_PIX_FMT_CUDA
HW acceleration through CUDA.
Definition: pixfmt.h:260
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
MpegEncContext::progressive_sequence
int progressive_sequence
Definition: mpegvideo.h:247
M2V_OFFSET
#define M2V_OFFSET(x)
Definition: mpeg12dec.c:2670
ff_mb_pat_vlc
VLCElem ff_mb_pat_vlc[512]
Definition: mpeg12.c:135
level
uint8_t level
Definition: svq3.c:208
AV_EF_EXPLODE
#define AV_EF_EXPLODE
abort decoding on minor error detection
Definition: defs.h:51
Mpeg1Context::a53_buf_ref
AVBufferRef * a53_buf_ref
Definition: mpeg12dec.c:86
ff_mpeg2_aspect
const AVRational ff_mpeg2_aspect[16]
Definition: mpeg12data.c:380
AVPanScan::position
int16_t position[3][2]
position of the top left corner in 1/16 pel for up to 3 fields/frames
Definition: defs.h:274
show_bits_long
static unsigned int show_bits_long(GetBitContext *s, int n)
Show 0-32 bits.
Definition: get_bits.h:498
mpeg_decode_a53_cc
static int mpeg_decode_a53_cc(AVCodecContext *avctx, const uint8_t *p, int buf_size)
Definition: mpeg12dec.c:1903
get_bits_left
static int get_bits_left(GetBitContext *gb)
Definition: get_bits.h:694
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
decode_slice
static int decode_slice(AVCodecContext *c, void *arg)
Definition: ffv1dec.c:360
ff_mpv_decode_init
av_cold int ff_mpv_decode_init(MpegEncContext *s, AVCodecContext *avctx)
Initialize the given MpegEncContext for decoding.
Definition: mpegvideo_dec.c:86
AV_CLASS_CATEGORY_DECODER
@ AV_CLASS_CATEGORY_DECODER
Definition: log.h:35
AV_STEREO3D_SIDEBYSIDE_QUINCUNX
@ AV_STEREO3D_SIDEBYSIDE_QUINCUNX
Views are next to each other, but when upscaling apply a checkerboard pattern.
Definition: stereo3d.h:114
IPUContext::m
Mpeg12SliceContext m
Definition: mpeg12dec.c:2762
FF_MPV_QSCALE_TYPE_MPEG2
#define FF_MPV_QSCALE_TYPE_MPEG2
Definition: mpegvideodec.h:41
mem_internal.h
ff_get_format
int ff_get_format(AVCodecContext *avctx, const enum AVPixelFormat *fmt)
Select the (possibly hardware accelerated) pixel format.
Definition: decode.c:1207
mpeg_decode_frame
static int mpeg_decode_frame(AVCodecContext *avctx, AVFrame *picture, int *got_output, AVPacket *avpkt)
Definition: mpeg12dec.c:2556
AV_EF_COMPLIANT
#define AV_EF_COMPLIANT
consider all spec non compliances as errors
Definition: defs.h:55
MpegEncContext::top_field_first
int top_field_first
Definition: mpegvideo.h:255
SEQ_END_CODE
#define SEQ_END_CODE
Definition: mpeg12.h:28
av_frame_new_side_data
AVFrameSideData * av_frame_new_side_data(AVFrame *frame, enum AVFrameSideDataType type, size_t size)
Add a new side data to a frame.
Definition: frame.c:647
check_scantable_index
#define check_scantable_index(ctx, x)
Definition: mpeg12dec.c:133
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AV_FRAME_DATA_A53_CC
@ AV_FRAME_DATA_A53_CC
ATSC A53 Part 4 Closed Captions.
Definition: frame.h:59
MT_FIELD
#define MT_FIELD
Definition: mpeg12dec.c:401
EXT_START_CODE
#define EXT_START_CODE
Definition: cavs.h:39
MV_TYPE_16X8
#define MV_TYPE_16X8
2 vectors, one per 16x8 block
Definition: mpegvideo.h:176
av_div_q
AVRational av_div_q(AVRational b, AVRational c)
Divide one rational by another.
Definition: rational.c:88
AVPanScan
Pan Scan area.
Definition: defs.h:253
AVCodecContext::err_recognition
int err_recognition
Error recognition; may misdetect some more or less valid parts as errors.
Definition: avcodec.h:1398
SLICE_MAX_START_CODE
#define SLICE_MAX_START_CODE
Definition: cavs.h:38
int64_t
long long int64_t
Definition: coverity.c:34
MB_TYPE_16x8
#define MB_TYPE_16x8
Definition: mpegutils.h:42
get_bits_count
static int get_bits_count(const GetBitContext *s)
Definition: get_bits.h:254
Mpeg1Context::vbv_delay
int vbv_delay
Definition: mpeg12dec.c:100
ipu_decode_init
static av_cold int ipu_decode_init(AVCodecContext *avctx)
Definition: mpeg12dec.c:2864
ff_update_duplicate_context
int ff_update_duplicate_context(MpegEncContext *dst, const MpegEncContext *src)
Definition: mpegvideo.c:158
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:427
start_code
static const uint8_t start_code[]
Definition: videotoolboxenc.c:230
w
uint8_t w
Definition: llviddspenc.c:38
HWACCEL_DXVA2
#define HWACCEL_DXVA2(codec)
Definition: hwconfig.h:64
ff_mpegvideo_decoder
const FFCodec ff_mpegvideo_decoder
Definition: mpeg12dec.c:2745
AVPacket::data
uint8_t * data
Definition: packet.h:588
Mpeg1Context::closed_gop
int closed_gop
Definition: mpeg12dec.c:96
AVOption
AVOption.
Definition: opt.h:429
HWACCEL_D3D11VA2
#define HWACCEL_D3D11VA2(codec)
Definition: hwconfig.h:66
ff_reverse
const uint8_t ff_reverse[256]
Definition: reverse.c:23
MB_TYPE_16x16
#define MB_TYPE_16x16
Definition: mpegutils.h:41
mpeg_decode_mb
static int mpeg_decode_mb(Mpeg12SliceContext *const s, int *mb_skip_run)
Definition: mpeg12dec.c:406
AV_PIX_FMT_D3D11VA_VLD
@ AV_PIX_FMT_D3D11VA_VLD
HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView p...
Definition: pixfmt.h:254
FFCodec
Definition: codec_internal.h:127
ff_mpv_framesize_disable
static void ff_mpv_framesize_disable(ScratchpadContext *sc)
Disable allocating the ScratchpadContext's buffers in future calls to ff_mpv_framesize_alloc().
Definition: mpegpicture.h:143
PICT_BOTTOM_FIELD
#define PICT_BOTTOM_FIELD
Definition: mpegutils.h:32
FF_HW_SIMPLE_CALL
#define FF_HW_SIMPLE_CALL(avctx, function)
Definition: hwaccel_internal.h:176
ff_er_add_slice
void ff_er_add_slice(ERContext *s, int startx, int starty, int endx, int endy, int status)
Add a slice.
Definition: error_resilience.c:828
ff_init_block_index
void ff_init_block_index(MpegEncContext *s)
Definition: mpegvideo.c:491
reverse.h
mpegvideo.h
MpegEncContext::avctx
struct AVCodecContext * avctx
Definition: mpegvideo.h:81
UPDATE_CACHE
#define UPDATE_CACHE(name, gb)
Definition: get_bits.h:213
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
Mpeg1Context::first_slice
int first_slice
Definition: mpeg12dec.c:98
ER_DC_END
#define ER_DC_END
Definition: error_resilience.h:33
mpeg_decode_postinit
static int mpeg_decode_postinit(AVCodecContext *avctx)
Definition: mpeg12dec.c:862
MpegEncContext::height
int height
picture size. must be a multiple of 16
Definition: mpegvideo.h:86
mpegutils.h
ff_set_dimensions
int ff_set_dimensions(AVCodecContext *s, int width, int height)
Check that the provided frame dimensions are valid and set them on the codec context.
Definition: utils.c:91
ER_MV_ERROR
#define ER_MV_ERROR
Definition: error_resilience.h:31
ff_idctdsp_init
av_cold void ff_idctdsp_init(IDCTDSPContext *c, AVCodecContext *avctx)
Definition: idctdsp.c:228
mpeg_decode_quant_matrix_extension
static void mpeg_decode_quant_matrix_extension(MPVContext *const s, GetBitContext *const gb)
Definition: mpeg12dec.c:1150
SEQ_START_CODE
#define SEQ_START_CODE
Definition: mpeg12.h:29
FF_DEBUG_PICT_INFO
#define FF_DEBUG_PICT_INFO
Definition: avcodec.h:1375
MV_TYPE_DMV
#define MV_TYPE_DMV
2 vectors, special mpeg2 Dual Prime Vectors
Definition: mpegvideo.h:178
CC_FORMAT_DISH
@ CC_FORMAT_DISH
Definition: mpeg12dec.c:69
MpegEncContext::out_format
enum OutputFormat out_format
output format
Definition: mpegvideo.h:87
AV_FRAME_FLAG_TOP_FIELD_FIRST
#define AV_FRAME_FLAG_TOP_FIELD_FIRST
A flag to mark frames where the top field is displayed first if the content is interlaced.
Definition: frame.h:655
GET_CACHE
#define GET_CACHE(name, gb)
Definition: get_bits.h:251
skip_bits
static void skip_bits(GetBitContext *s, int n)
Definition: get_bits.h:383
ff_mpeg2_rl_vlc
RL_VLC_ELEM ff_mpeg2_rl_vlc[674]
Definition: mpeg12.c:138
MpegEncContext::intra_scantable
ScanTable intra_scantable
Definition: mpegvideo.h:77
AVCodecContext::framerate
AVRational framerate
Definition: avcodec.h:551
ff_permute_scantable
av_cold void ff_permute_scantable(uint8_t dst[64], const uint8_t src[64], const uint8_t permutation[64])
Definition: idctdsp.c:30
close
static av_cold void close(AVCodecParserContext *s)
Definition: apv_parser.c:136
AV_STEREO3D_SIDEBYSIDE
@ AV_STEREO3D_SIDEBYSIDE
Views are next to each other.
Definition: stereo3d.h:64
get_bits
static unsigned int get_bits(GetBitContext *s, int n)
Read 1-25 bits.
Definition: get_bits.h:337
mx
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t mx
Definition: dsp.h:57
MT_DMV
#define MT_DMV
Definition: mpeg12dec.c:404
MpegEncContext::mb_height
int mb_height
number of MBs horizontally & vertically
Definition: mpegvideo.h:98
ff_mbincr_vlc
VLCElem ff_mbincr_vlc[538]
Definition: mpeg12.c:132
FFCodec::p
AVCodec p
The public AVCodec.
Definition: codec_internal.h:131
slice_end
static int slice_end(AVCodecContext *avctx, AVFrame *pict, int *got_output)
Handle slice ends.
Definition: mpeg12dec.c:1693
FMT_MPEG1
@ FMT_MPEG1
Definition: mpegvideo.h:53
decode_chunks
static int decode_chunks(AVCodecContext *avctx, AVFrame *picture, int *got_output, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:2238
AVCodecContext::skip_frame
enum AVDiscard skip_frame
Skip decoding for selected frames.
Definition: avcodec.h:1662
AV_STEREO3D_2D
@ AV_STEREO3D_2D
Video is not stereoscopic (and metadata has to be there).
Definition: stereo3d.h:52
MpegEncContext::picture_structure
int picture_structure
Definition: mpegvideo.h:251
wrap
#define wrap(func)
Definition: neontest.h:65
timecode.h
GetBitContext
Definition: get_bits.h:109
AV_EF_BITSTREAM
#define AV_EF_BITSTREAM
detect bitstream specification deviations
Definition: defs.h:49
AVPanScan::width
int width
width and height in 1/16 pel
Definition: defs.h:266
slice_decode_thread
static int slice_decode_thread(AVCodecContext *c, void *arg)
Definition: mpeg12dec.c:1639
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:488
IDCTDSPContext::idct_put
void(* idct_put)(uint8_t *dest, ptrdiff_t line_size, int16_t *block)
block -> idct -> clip to unsigned 8 bit -> dest.
Definition: idctdsp.h:62
MB_TYPE_CBP
#define MB_TYPE_CBP
Definition: mpegutils.h:47
val
static double val(void *priv, double ch)
Definition: aeval.c:77
Mpeg1Context::tmpgexs
int tmpgexs
Definition: mpeg12dec.c:97
HWACCEL_VDPAU
#define HWACCEL_VDPAU(codec)
Definition: hwconfig.h:72
AV_CODEC_FLAG_LOW_DELAY
#define AV_CODEC_FLAG_LOW_DELAY
Force low delay.
Definition: avcodec.h:314
mpeg12_pixfmt_list_444
static enum AVPixelFormat mpeg12_pixfmt_list_444[]
Definition: mpeg12dec.c:834
mpeg_decode_slice
static int mpeg_decode_slice(Mpeg12SliceContext *const s, int mb_y, const uint8_t **buf, int buf_size)
Decode a slice.
Definition: mpeg12dec.c:1359
MpegEncContext::width
int width
Definition: mpegvideo.h:86
AVCodecContext::coded_height
int coded_height
Definition: avcodec.h:607
mpeg1_decode_sequence
static int mpeg1_decode_sequence(AVCodecContext *avctx, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:1743
av_reduce
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
HAS_CBP
#define HAS_CBP(a)
Definition: mpegutils.h:87
AVRational::num
int num
Numerator.
Definition: rational.h:59
GOP_START_CODE
#define GOP_START_CODE
Definition: mpeg12.h:30
MpegEncContext::frame_pred_frame_dct
int frame_pred_frame_dct
Definition: mpegvideo.h:254
mpeg_decode_sequence_display_extension
static void mpeg_decode_sequence_display_extension(Mpeg1Context *const s1, GetBitContext *const gb)
Definition: mpeg12dec.c:1067
ff_frame_new_side_data_from_buf
int ff_frame_new_side_data_from_buf(const AVCodecContext *avctx, AVFrame *frame, enum AVFrameSideDataType type, AVBufferRef **buf)
Similar to ff_frame_new_side_data, but using an existing buffer ref.
Definition: decode.c:2164
IPUContext
Definition: mpeg12dec.c:2761
mpeg1_hwaccel_pixfmt_list_420
static enum AVPixelFormat mpeg1_hwaccel_pixfmt_list_420[]
Definition: mpeg12dec.c:791
MpegEncContext::slice_ctx_size
unsigned slice_ctx_size
If set, ff_mpv_common_init() will allocate slice contexts of this size.
Definition: mpegvideo.h:284
mpeg12.h
mpegvideodec.h
ff_mpeg2video_decoder
const FFCodec ff_mpeg2video_decoder
Definition: mpeg12dec.c:2699
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:210
Mpeg1Context::frame_rate_index
unsigned frame_rate_index
Definition: mpeg12dec.c:94
ipu_decode_frame
static int ipu_decode_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *avpkt)
Definition: mpeg12dec.c:2767
HAS_MV
#define HAS_MV(a, dir)
Definition: mpegutils.h:91
ER_DC_ERROR
#define ER_DC_ERROR
Definition: error_resilience.h:30
av_cold
#define av_cold
Definition: attributes.h:106
mpeg2_hwaccel_pixfmt_list_420
static enum AVPixelFormat mpeg2_hwaccel_pixfmt_list_420[]
Definition: mpeg12dec.c:802
init_get_bits8
static int init_get_bits8(GetBitContext *s, const uint8_t *buffer, int byte_size)
Initialize GetBitContext.
Definition: get_bits.h:544
mpeg1_decode_picture
static int mpeg1_decode_picture(AVCodecContext *avctx, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:977
Mpeg1Context::save_progressive_seq
int save_progressive_seq
Definition: mpeg12dec.c:92
emms_c
#define emms_c()
Definition: emms.h:63
CLOSE_READER
#define CLOSE_READER(name, gb)
Definition: get_bits.h:189
AVCodecContext::extradata_size
int extradata_size
Definition: avcodec.h:515
AVCodecContext::has_b_frames
int has_b_frames
Size of the frame reordering buffer in the decoder.
Definition: avcodec.h:697
A53_MAX_CC_COUNT
#define A53_MAX_CC_COUNT
Definition: mpeg12dec.c:62
Mpeg1Context::stereo3d_type
enum AVStereo3DType stereo3d_type
Definition: mpeg12dec.c:84
ff_er_frame_end
void ff_er_frame_end(ERContext *s, int *decode_error_flags)
Indicate that a frame has finished decoding and perform error concealment in case it has been enabled...
Definition: error_resilience.c:898
ff_mpeg_flush
av_cold void ff_mpeg_flush(AVCodecContext *avctx)
Definition: mpegvideo_dec.c:452
FF_CODEC_DECODE_CB
#define FF_CODEC_DECODE_CB(func)
Definition: codec_internal.h:347
stereo3d.h
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:134
s
#define s(width, name)
Definition: cbs_vp9.c:198
ff_mv_vlc
VLCElem ff_mv_vlc[266]
Definition: mpeg12.c:127
CHROMA_422
#define CHROMA_422
Definition: mpegvideo.h:264
MPVWorkPicture::ptr
MPVPicture * ptr
RefStruct reference.
Definition: mpegpicture.h:99
ff_mpeg1_aspect
const float ff_mpeg1_aspect[16]
Definition: mpeg12data.c:359
MB_TYPE_ZERO_MV
#define MB_TYPE_ZERO_MV
Definition: mpeg12dec.h:28
SHOW_SBITS
#define SHOW_SBITS(name, gb, num)
Definition: get_bits.h:248
ff_mpeg_er_frame_start
void ff_mpeg_er_frame_start(MpegEncContext *s)
Definition: mpeg_er.c:46
flush
static av_cold void flush(AVCodecContext *avctx)
Definition: mpeg12dec.c:2623
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:41
Mpeg1Context::aspect_ratio_info
unsigned aspect_ratio_info
Definition: mpeg12dec.c:91
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:296
Mpeg1Context::pan_scan
AVPanScan pan_scan
Definition: mpeg12dec.c:83
get_sbits
static int get_sbits(GetBitContext *s, int n)
Definition: get_bits.h:322
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:231
PICT_TOP_FIELD
#define PICT_TOP_FIELD
Definition: mpegutils.h:31
decode.h
mpeg2_decode_block_non_intra
static int mpeg2_decode_block_non_intra(Mpeg12SliceContext *const s, int16_t *block, int n)
Definition: mpeg12dec.c:226
get_bits.h
mpeg12_pixfmt_list_422
static enum AVPixelFormat mpeg12_pixfmt_list_422[]
Definition: mpeg12dec.c:829
SKIP_BITS
#define SKIP_BITS(name, gb, num)
Definition: get_bits.h:229
field
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this field
Definition: writing_filters.txt:78
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
AVCodecContext::rc_max_rate
int64_t rc_max_rate
maximum bitrate
Definition: avcodec.h:1270
MpegEncContext::cur_pic
MPVWorkPicture cur_pic
copy of the current picture structure.
Definition: mpegvideo.h:134
CODEC_LONG_NAME
#define CODEC_LONG_NAME(str)
Definition: codec_internal.h:332
AVCodecContext::codec_id
enum AVCodecID codec_id
Definition: avcodec.h:441
my
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t intptr_t my
Definition: dsp.h:57
arg
const char * arg
Definition: jacosubdec.c:67
rl_vlc
static const VLCElem * rl_vlc[2]
Definition: mobiclip.c:279
ff_mpv_common_end
av_cold void ff_mpv_common_end(MpegEncContext *s)
Definition: mpegvideo.c:447
MpegEncContext::mb_stride
int mb_stride
mb_width+1 used for some arrays to allow simple addressing of left & top MBs without sig11
Definition: mpegvideo.h:99
Mpeg12SliceContext
Definition: mpeg12dec.c:72
if
if(ret)
Definition: filter_design.txt:179
ff_mpv_unref_picture
void ff_mpv_unref_picture(MPVWorkPicture *pic)
Definition: mpegpicture.c:98
MpegEncContext::low_delay
int low_delay
no reordering needed / has no B-frames
Definition: mpegvideo.h:230
AVDISCARD_ALL
@ AVDISCARD_ALL
discard all
Definition: defs.h:232
MB_PTYPE_VLC_BITS
#define MB_PTYPE_VLC_BITS
Definition: mpeg12vlc.h:39
LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
MpegEncContext::mpeg12_contexts
struct Mpeg12SliceContext * mpeg12_contexts[MAX_THREADS]
Definition: mpegvideo.h:113
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:76
PTRDIFF_SPECIFIER
#define PTRDIFF_SPECIFIER
Definition: internal.h:118
ff_mpv_export_qp_table
int ff_mpv_export_qp_table(const MpegEncContext *s, AVFrame *f, const MPVPicture *p, int qp_type)
Definition: mpegvideo_dec.c:413
NULL
#define NULL
Definition: coverity.c:32
format
New swscale design to change SwsGraph is what coordinates multiple passes These can include cascaded scaling error diffusion and so on Or we could have separate passes for the vertical and horizontal scaling In between each SwsPass lies a fully allocated image buffer Graph passes may have different levels of e g we can have a single threaded error diffusion pass following a multi threaded scaling pass SwsGraph is internally recreated whenever the image format
Definition: swscale-v2.txt:14
run
uint8_t run
Definition: svq3.c:207
AVCodecContext::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:669
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
ER_AC_ERROR
#define ER_AC_ERROR
Definition: error_resilience.h:29
MpegEncContext::mb_y
int mb_y
Definition: mpegvideo.h:193
SLICE_MIN_START_CODE
#define SLICE_MIN_START_CODE
Definition: mpeg12.h:32
mpeg12_execute_slice_threads
static void mpeg12_execute_slice_threads(AVCodecContext *avctx, Mpeg1Context *const s)
Definition: mpeg12dec.c:2209
hwaccel_internal.h
Mpeg1Context::sync
int sync
Definition: mpeg12dec.c:95
MpegEncContext::next_pic
MPVWorkPicture next_pic
copy of the next picture structure.
Definition: mpegvideo.h:128
AVCHROMA_LOC_LEFT
@ AVCHROMA_LOC_LEFT
MPEG-2/4 4:2:0, H.264 default for 4:2:0.
Definition: pixfmt.h:798
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
ff_mpv_decode_close
av_cold int ff_mpv_decode_close(AVCodecContext *avctx)
Definition: mpegvideo_dec.c:166
AVCHROMA_LOC_TOPLEFT
@ AVCHROMA_LOC_TOPLEFT
ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2.
Definition: pixfmt.h:800
AVCodecContext::bit_rate
int64_t bit_rate
the average bitrate
Definition: avcodec.h:481
M2V_PARAM
#define M2V_PARAM
Definition: mpeg12dec.c:2671
av_default_item_name
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:241
AV_PICTURE_TYPE_I
@ AV_PICTURE_TYPE_I
Intra.
Definition: avutil.h:278
get_bits1
static unsigned int get_bits1(GetBitContext *s)
Definition: get_bits.h:391
profiles.h
mpeg_get_qscale
static int mpeg_get_qscale(GetBitContext *const gb, int q_scale_type)
Definition: mpegvideodec.h:80
CC_FORMAT_A53_PART4
@ CC_FORMAT_A53_PART4
Definition: mpeg12dec.c:66
FF_PTR_ADD
#define FF_PTR_ADD(ptr, off)
Definition: internal.h:80
LAST_SKIP_BITS
#define LAST_SKIP_BITS(name, gb, num)
Definition: get_bits.h:235
MB_TYPE_QUANT
#define MB_TYPE_QUANT
Definition: mpegutils.h:48
avpriv_find_start_code
const uint8_t * avpriv_find_start_code(const uint8_t *p, const uint8_t *end, uint32_t *state)
MB_TYPE_BIDIR_MV
#define MB_TYPE_BIDIR_MV
Definition: mpegutils.h:51
lowres
static int lowres
Definition: ffplay.c:330
ff_mpeg1_rl_vlc
RL_VLC_ELEM ff_mpeg1_rl_vlc[680]
Definition: mpeg12.c:137
MB_BTYPE_VLC_BITS
#define MB_BTYPE_VLC_BITS
Definition: mpeg12vlc.h:40
CC_FORMAT_AUTO
@ CC_FORMAT_AUTO
Definition: mpeg12dec.c:65
AV_PIX_FMT_D3D12
@ AV_PIX_FMT_D3D12
Hardware surfaces for Direct3D 12.
Definition: pixfmt.h:440
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
MpegEncContext::slice_context_count
int slice_context_count
number of used thread_contexts
Definition: mpegvideo.h:116
get_vlc2
static av_always_inline int get_vlc2(GetBitContext *s, const VLCElem *table, int bits, int max_depth)
Parse a vlc code.
Definition: get_bits.h:651
AV_FRAME_DATA_AFD
@ AV_FRAME_DATA_AFD
Active Format Description data consisting of a single byte as specified in ETSI TS 101 154 using AVAc...
Definition: frame.h:90
AVCodecContext::level
int level
Encoding level descriptor.
Definition: avcodec.h:1628
atomic_load_explicit
#define atomic_load_explicit(object, order)
Definition: stdatomic.h:96
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
AV_CODEC_ID_MPEG1VIDEO
@ AV_CODEC_ID_MPEG1VIDEO
Definition: codec_id.h:53
get_bits_bytesize
static int get_bits_bytesize(const GetBitContext *s, int round_up)
Get the size of the GetBitContext's buffer in bytes.
Definition: get_bits.h:268
MpegEncContext::idsp
IDCTDSPContext idsp
Definition: mpegvideo.h:162
ff_mpv_alloc_dummy_frames
int ff_mpv_alloc_dummy_frames(MpegEncContext *s)
Ensure that the dummy frames are allocated according to pict_type if necessary.
Definition: mpegvideo_dec.c:304
ff_dlog
#define ff_dlog(a,...)
Definition: tableprint_vlc.h:28
Mpeg1Context::save_chroma_format
int save_chroma_format
Definition: mpeg12dec.c:92
startcode.h
CC_FORMAT_DVD
@ CC_FORMAT_DVD
Definition: mpeg12dec.c:68
IS_INTRA
#define IS_INTRA(x, y)
AVDISCARD_NONKEY
@ AVDISCARD_NONKEY
discard all frames except keyframes
Definition: defs.h:231
check_marker
static int check_marker(void *logctx, GetBitContext *s, const char *msg)
Definition: mpegvideodec.h:89
ERContext::error_count
atomic_int error_count
Definition: error_resilience.h:67
AVCodecContext::flags2
int flags2
AV_CODEC_FLAG2_*.
Definition: avcodec.h:495
ff_get_buffer
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
Definition: decode.c:1728
init
int(* init)(AVBSFContext *ctx)
Definition: dts2pts.c:550
mpeg2video_options
static const AVOption mpeg2video_options[]
Definition: mpeg12dec.c:2673
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
AV_CODEC_FLAG_GRAY
#define AV_CODEC_FLAG_GRAY
Only decode/encode grayscale.
Definition: avcodec.h:302
AVPacket::size
int size
Definition: packet.h:589
dc
Intra DC prediction value dc[1] as described in the Snow bitstream documentation (doc/snow.txt).
Definition: snow.txt:400
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:94
MpegEncContext::qscale
int qscale
QP.
Definition: mpegvideo.h:154
AV_CODEC_ID_IPU
@ AV_CODEC_ID_IPU
Definition: codec_id.h:310
AV_FRAME_DATA_PANSCAN
@ AV_FRAME_DATA_PANSCAN
The data is the AVPanScan struct defined in libavcodec.
Definition: frame.h:53
CC_FORMAT_SCTE20
@ CC_FORMAT_SCTE20
Definition: mpeg12dec.c:67
height
#define height
Definition: dsp.h:89
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:278
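A minimal usage sketch, assuming a caller-owned destination frame; copy_frame_ref() is an illustrative helper, not an FFmpeg function:
#include <libavutil/frame.h>

static int copy_frame_ref(AVFrame *dst, const AVFrame *src)
{
    /* Make dst share src's buffers; only reference counts change, no pixel
     * data is copied. dst must not hold a reference already. */
    int ret = av_frame_ref(dst, src);
    if (ret < 0)
        return ret;
    /* ... read from dst ... */
    av_frame_unref(dst);   /* drop the extra reference when done */
    return 0;
}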
MT_FRAME
#define MT_FRAME
Definition: mpeg12dec.c:402
codec_internal.h
shift
static int shift(int a, int b)
Definition: bonk.c:261
IPUContext::flags
int flags
Definition: mpeg12dec.c:2764
MpegEncContext::intra_matrix
uint16_t intra_matrix[64]
matrix transmitted in the bitstream
Definition: mpegvideo.h:203
mpeg_field_start
static int mpeg_field_start(Mpeg1Context *s1, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:1224
MpegEncContext::v_edge_pos
int v_edge_pos
horizontal / vertical position of the right/bottom edge (pixel replication)
Definition: mpegvideo.h:101
Mpeg1Context::bit_rate
int64_t bit_rate
Definition: mpeg12dec.c:101
VLCElem
Definition: vlc.h:32
ff_mpeg1video_decoder
const FFCodec ff_mpeg1video_decoder
Definition: mpeg12dec.c:2642
ff_frame_new_side_data
int ff_frame_new_side_data(const AVCodecContext *avctx, AVFrame *frame, enum AVFrameSideDataType type, size_t size, AVFrameSideData **psd)
Wrapper around av_frame_new_side_data, which rejects side data overridden by the demuxer.
Definition: decode.c:2126
AV_RB32
#define AV_RB32(p)
Definition: bytestream.h:96
FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM
#define FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM
The decoder extracts and fills its parameters even if the frame is skipped due to the skip_frame sett...
Definition: codec_internal.h:54
AVFrameSideData::data
uint8_t * data
Definition: frame.h:284
MB_TYPE_SKIP
#define MB_TYPE_SKIP
Definition: mpegutils.h:61
FF_THREAD_SLICE
#define FF_THREAD_SLICE
Decode more than one part of a single frame at once.
Definition: avcodec.h:1573
ff_mpeg_draw_horiz_band
void ff_mpeg_draw_horiz_band(MpegEncContext *s, int y, int h)
Definition: mpegvideo_dec.c:444
Mpeg12SliceContext::last_dc
int last_dc[3]
last DC values
Definition: mpeg12dec.c:76
PICTURE_START_CODE
#define PICTURE_START_CODE
Definition: mpeg12.h:31
USER_START_CODE
#define USER_START_CODE
Definition: cavs.h:40
AVCodecContext::skip_bottom
int skip_bottom
Number of macroblock rows at the bottom which are skipped.
Definition: avcodec.h:1690
AVCodecHWConfigInternal
Definition: hwconfig.h:25
MpegEncContext::mbskip_table
uint8_t * mbskip_table
used to avoid copy if macroblock skipped (for black regions for example) and used for B-frame encodin...
Definition: mpegvideo.h:146
ff_mpeg1_default_intra_matrix
const uint16_t ff_mpeg1_default_intra_matrix[256]
Definition: mpeg12data.c:31
diff
static av_always_inline int diff(const struct color_info *a, const struct color_info *b, const int trans_thresh)
Definition: vf_paletteuse.c:166
MpegEncContext::context_initialized
int context_initialized
Definition: mpegvideo.h:97
ff_mpv_frame_start
int ff_mpv_frame_start(MpegEncContext *s, AVCodecContext *avctx)
generic function called after decoding the header and before a frame is decoded.
Definition: mpegvideo_dec.c:347
MB_TYPE_INTERLACED
#define MB_TYPE_INTERLACED
Definition: mpegutils.h:45
OPEN_READER
#define OPEN_READER(name, gb)
Definition: get_bits.h:177
Mpeg12SliceContext::gb
GetBitContext gb
Definition: mpeg12dec.c:74
Mpeg12SliceContext::c
MPVContext c
Definition: mpeg12dec.c:73
Mpeg1Context::has_stereo3d
int has_stereo3d
Definition: mpeg12dec.c:85
mpeg_decode_init
static av_cold int mpeg_decode_init(AVCodecContext *avctx)
Definition: mpeg12dec.c:768
AV_CODEC_CAP_SLICE_THREADS
#define AV_CODEC_CAP_SLICE_THREADS
Codec supports slice-based (or partition-based) multithreading.
Definition: codec.h:99
HWACCEL_D3D11VA
#define HWACCEL_D3D11VA(codec)
Definition: hwconfig.h:78
mpegvideodata.h
attributes.h
ff_mpeg1_decode_block_intra
int ff_mpeg1_decode_block_intra(GetBitContext *gb, const uint16_t *quant_matrix, const uint8_t *scantable, int last_dc[3], int16_t *block, int index, int qscale)
Definition: mpeg12.c:183
MV_TYPE_FIELD
#define MV_TYPE_FIELD
2 vectors, one per field
Definition: mpegvideo.h:177
skip_bits1
static void skip_bits1(GetBitContext *s)
Definition: get_bits.h:416
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
HWACCEL_NVDEC
#define HWACCEL_NVDEC(codec)
Definition: hwconfig.h:68
mpeg2video_class
static const AVClass mpeg2video_class
Definition: mpeg12dec.c:2691
MpegEncContext::pict_type
enum AVPictureType pict_type
AV_PICTURE_TYPE_I, AV_PICTURE_TYPE_P, AV_PICTURE_TYPE_B, ...
Definition: mpegvideo.h:156
mpeg_decode_sequence_extension
static void mpeg_decode_sequence_extension(Mpeg1Context *const s1, GetBitContext *const gb)
Definition: mpeg12dec.c:1023
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:126
AVBufferRef::size
size_t size
Size of data in bytes.
Definition: buffer.h:94
ff_mpeg2_video_profiles
const AVProfile ff_mpeg2_video_profiles[]
Definition: profiles.c:116
AV_PIX_FMT_VDPAU
@ AV_PIX_FMT_VDPAU
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
Definition: pixfmt.h:194
CHROMA_444
#define CHROMA_444
Definition: mpegvideo.h:265
emms.h
AV_PIX_FMT_VIDEOTOOLBOX
@ AV_PIX_FMT_VIDEOTOOLBOX
hardware decoding through Videotoolbox
Definition: pixfmt.h:305
ff_print_debug_info
void ff_print_debug_info(const MpegEncContext *s, const MPVPicture *p, AVFrame *pict)
Definition: mpegvideo_dec.c:406
av_assert2
#define av_assert2(cond)
assert() equivalent, that does lie in speed critical code.
Definition: avassert.h:68
MpegEncContext::progressive_frame
int progressive_frame
Definition: mpegvideo.h:269
CHROMA_420
#define CHROMA_420
Definition: mpegvideo.h:263
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
code
Example code referenced from the libavfilter design documentation (doc/filter_design.txt).
Definition: filter_design.txt:178
AV_CODEC_FLAG2_SHOW_ALL
#define AV_CODEC_FLAG2_SHOW_ALL
Show all frames before the first keyframe.
Definition: avcodec.h:360
ff_alternate_vertical_scan
const uint8_t ff_alternate_vertical_scan[64]
Definition: mpegvideodata.c:63
AVCodecContext::extradata
uint8_t * extradata
Out-of-band global headers that may be used by some codecs.
Definition: avcodec.h:514
show_bits
static unsigned int show_bits(GetBitContext *s, int n)
Show 1-25 bits.
Definition: get_bits.h:373
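A sketch of the peek/read distinction, assuming it is compiled inside the FFmpeg tree (get_bits.h is a libavcodec-internal header, not installed for applications):
#include "get_bits.h"

static int peek_then_read(const uint8_t *buf, int size)
{
    GetBitContext gb;
    int ret = init_get_bits8(&gb, buf, size);
    if (ret < 0)
        return ret;
    unsigned peeked = show_bits(&gb, 12);  /* does not advance the read position */
    unsigned read   = get_bits(&gb, 12);   /* returns the same bits and advances */
    return peeked == read;                 /* 1 for any valid input */
}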
internal.h
mpeg_set_cc_format
static void mpeg_set_cc_format(AVCodecContext *avctx, enum Mpeg2ClosedCaptionsFormat format, const char *label)
Definition: mpeg12dec.c:1883
AV_STEREO3D_TOPBOTTOM
@ AV_STEREO3D_TOPBOTTOM
Views are on top of each other.
Definition: stereo3d.h:76
IS_QUANT
#define IS_QUANT(a)
Definition: mpegutils.h:85
MpegEncContext::mb_x
int mb_x
Definition: mpegvideo.h:193
ff_mpeg12_init_vlcs
av_cold void ff_mpeg12_init_vlcs(void)
Definition: mpeg12.c:175
atomic_store_explicit
#define atomic_store_explicit(object, desired, order)
Definition: stdatomic.h:90
FF_DEBUG_STARTCODE
#define FF_DEBUG_STARTCODE
Definition: avcodec.h:1382
MpegEncContext::thread_context
struct MpegEncContext * thread_context[MAX_THREADS]
Definition: mpegvideo.h:112
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
av_d2q
AVRational av_d2q(double d, int max)
Convert a double precision floating point number to a rational.
Definition: rational.c:106
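A small self-contained example of av_d2q(); the frame-rate value is only illustrative:
#include <stdio.h>
#include <libavutil/rational.h>

int main(void)
{
    /* Approximate a floating-point rate with a rational whose terms stay
     * within +/-100000. */
    AVRational r = av_d2q(23.976, 100000);
    printf("%d/%d\n", r.num, r.den);   /* e.g. 2997/125, i.e. exactly 23.976 */
    return 0;
}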
MB_TYPE_MV_2_MV_DIR
#define MB_TYPE_MV_2_MV_DIR(a)
Definition: mpegutils.h:93
MB_PAT_VLC_BITS
#define MB_PAT_VLC_BITS
Definition: mpeg12vlc.h:38
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:496
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:179
MpegEncContext::last_pic
MPVWorkPicture last_pic
copy of the previous picture structure.
Definition: mpegvideo.h:122
MpegEncContext::intra_vlc_format
int intra_vlc_format
Definition: mpegvideo.h:258
AVCodecContext::chroma_sample_location
enum AVChromaLocation chroma_sample_location
This defines the location of chroma samples.
Definition: avcodec.h:676
MAX_INDEX
#define MAX_INDEX
Definition: mpeg12dec.c:132
mpeg_decode_motion
static int mpeg_decode_motion(Mpeg12SliceContext *const s, int fcode, int pred)
Definition: mpeg12dec.c:106
MpegEncContext::er
ERContext er
Definition: mpegvideo.h:286
AVCodecContext::height
int height
Definition: avcodec.h:592
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:631
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:760
HWACCEL_VIDEOTOOLBOX
#define HWACCEL_VIDEOTOOLBOX(codec)
Definition: hwconfig.h:74
idctdsp.h
avcodec.h
av_cmp_q
static int av_cmp_q(AVRational a, AVRational b)
Compare two rationals.
Definition: rational.h:89
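A short example of av_cmp_q() semantics (1, 0 or -1 for a > b, a == b, a < b):
#include <stdio.h>
#include <libavutil/rational.h>

int main(void)
{
    AVRational pal  = { 1, 25 };        /* 0.04    */
    AVRational ntsc = { 1001, 30000 };  /* ~0.0334 */
    printf("%d\n", av_cmp_q(pal, ntsc));   /* prints 1: 1/25 > 1001/30000 */
    return 0;
}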
GET_RL_VLC
#define GET_RL_VLC(level, run, name, gb, table, bits, max_depth, need_update)
Definition: get_bits.h:605
ff_zigzag_direct
const uint8_t ff_zigzag_direct[64]
Definition: mathtables.c:137
ff_mpeg12_frame_rate_tab
const AVRational ff_mpeg12_frame_rate_tab[]
Definition: mpeg12framerate.c:24
mpeg_decode_gop
static int mpeg_decode_gop(AVCodecContext *avctx, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:2177
ret
ret
Definition: filter_design.txt:187
AV_EF_AGGRESSIVE
#define AV_EF_AGGRESSIVE
consider things that a sane encoder/muxer should not do as an error
Definition: defs.h:56
pred
static const float pred[4]
Definition: siprdata.h:259
AV_FRAME_DATA_GOP_TIMECODE
@ AV_FRAME_DATA_GOP_TIMECODE
The GOP timecode in 25 bit timecode format.
Definition: frame.h:125
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
AVClass::class_name
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:81
frame
A frame, as described by the libavfilter frame-request protocol (doc/filter_design.txt).
Definition: filter_design.txt:265
ff_mpeg1_default_non_intra_matrix
const uint16_t ff_mpeg1_default_non_intra_matrix[64]
Definition: mpeg12data.c:42
AVStereo3D::type
enum AVStereo3DType type
How views are packed within the video.
Definition: stereo3d.h:207
align_get_bits
static const uint8_t * align_get_bits(GetBitContext *s)
Definition: get_bits.h:560
TEX_VLC_BITS
#define TEX_VLC_BITS
Definition: dvdec.c:146
MPVPicture::f
struct AVFrame * f
Definition: mpegpicture.h:59
left
Scaled motion vector of the left neighbouring block, used for median motion vector prediction in the Snow codec (doc/snow.txt).
Definition: snow.txt:386
mpeg_get_pixelformat
static enum AVPixelFormat mpeg_get_pixelformat(AVCodecContext *avctx)
Definition: mpeg12dec.c:839
AV_CODEC_FLAG2_CHUNKS
#define AV_CODEC_FLAG2_CHUNKS
Input bitstream might be truncated at packet boundaries instead of only at frame boundaries.
Definition: avcodec.h:351
AV_RL32
#define AV_RL32(p)
Definition: bytestream.h:92
mpeg12data.h
mpeg1_decode_block_inter
static int mpeg1_decode_block_inter(Mpeg12SliceContext *const s, int16_t *block, int n)
Definition: mpeg12dec.c:142
skip_1stop_8data_bits
static int skip_1stop_8data_bits(GetBitContext *gb)
Definition: get_bits.h:699
AVCodecContext
main external API structure.
Definition: avcodec.h:431
AVCodecContext::active_thread_type
int active_thread_type
Which multithreading methods are in use by the codec.
Definition: avcodec.h:1580
av_timecode_make_mpeg_tc_string
char * av_timecode_make_mpeg_tc_string(char *buf, uint32_t tc25bit)
Get the timecode string from the 25-bit timecode format (MPEG GOP format).
Definition: timecode.c:147
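A minimal sketch of formatting a 25-bit GOP timecode; the value 0 is only a placeholder for the word read from the GOP header, and the buffer size constant AV_TIMECODE_STR_SIZE comes from libavutil/timecode.h:
#include <stdint.h>
#include <stdio.h>
#include <libavutil/timecode.h>

int main(void)
{
    char buf[AV_TIMECODE_STR_SIZE];
    uint32_t tc25bit = 0;   /* placeholder GOP timecode word: 00:00:00:00 */
    printf("%s\n", av_timecode_make_mpeg_tc_string(buf, tc25bit));
    return 0;
}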
MpegEncContext::intra_dc_precision
int intra_dc_precision
Definition: mpegvideo.h:253
AVCodecContext::execute
int(* execute)(struct AVCodecContext *c, int(*func)(struct AVCodecContext *c2, void *arg), void *arg2, int *ret, int count, int size)
The codec may call this to execute several independent things.
Definition: avcodec.h:1591
SHOW_UBITS
#define SHOW_UBITS(name, gb, num)
Definition: get_bits.h:247
AV_PICTURE_TYPE_B
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
Definition: avutil.h:280
mpeg12dec.h
AVCHROMA_LOC_CENTER
@ AVCHROMA_LOC_CENTER
MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0.
Definition: pixfmt.h:799
AVRational::den
int den
Denominator.
Definition: rational.h:60
error_resilience.h
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
FF_HW_CALL
#define FF_HW_CALL(avctx, function,...)
Definition: hwaccel_internal.h:173
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Underlying C type is int.
Definition: opt.h:259
AVCodecContext::profile
int profile
profile
Definition: avcodec.h:1618
AVFrame::metadata
AVDictionary * metadata
metadata.
Definition: frame.h:705
Mpeg1Context::cc_format
enum Mpeg2ClosedCaptionsFormat cc_format
Definition: mpeg12dec.c:87
sign_extend
static av_const int sign_extend(int val, unsigned bits)
Definition: mathops.h:132
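The usual pattern behind such a helper, restated here as a standalone demo (sign_extend_demo() is illustrative, not the header's inline): interpret the low 'bits' bits of val as a two's-complement number:
#include <stdio.h>

static int sign_extend_demo(int val, unsigned bits)
{
    unsigned shift = 8 * sizeof(int) - bits;
    union { unsigned u; int s; } v = { (unsigned)val << shift };
    return v.s >> shift;   /* arithmetic shift pulls the sign bit back down */
}

int main(void)
{
    /* Motion-vector style wrap-around: 40 read in 6 bits means 40 - 64 = -24. */
    printf("%d\n", sign_extend_demo(40, 6));
    return 0;
}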
ff_mpv_frame_end
void ff_mpv_frame_end(MpegEncContext *s)
Definition: mpegvideo_dec.c:398
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:117
Mpeg1Context::slice_count
int slice_count
Definition: mpeg12dec.c:90
AV_CODEC_CAP_DELAY
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
Definition: codec.h:76
Windows::Graphics::DirectX::Direct3D11::p
IDirect3DDxgiInterfaceAccess _COM_Outptr_ void ** p
Definition: vsrc_gfxcapture_winrt.hpp:53
MpegEncContext::resync_mb_x
int resync_mb_x
x position of last resync marker
Definition: mpegvideo.h:209
FF_CODEC_PROPERTY_CLOSED_CAPTIONS
#define FF_CODEC_PROPERTY_CLOSED_CAPTIONS
Definition: avcodec.h:1639
av_mul_q
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
Definition: rational.c:80
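A hedged example in the spirit of MPEG-2 frame-rate handling: a base rate scaled by a (frame_rate_ext_n + 1) / (frame_rate_ext_d + 1) factor; the numbers are illustrative:
#include <stdio.h>
#include <libavutil/rational.h>

int main(void)
{
    AVRational base = { 24000, 1001 };
    AVRational ext  = { 2, 1 };              /* n = 1, d = 0 */
    AVRational rate = av_mul_q(base, ext);   /* product is returned reduced */
    printf("%d/%d\n", rate.num, rate.den);   /* 48000/1001 */
    return 0;
}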
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:78
av_buffer_realloc
int av_buffer_realloc(AVBufferRef **pbuf, size_t size)
Reallocate a given buffer.
Definition: buffer.c:183
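A sketch of growing a buffer incrementally; append_bytes() is a hypothetical helper, loosely in the spirit of accumulating caption payloads across slices:
#include <stdint.h>
#include <string.h>
#include <libavutil/buffer.h>

static int append_bytes(AVBufferRef **buf, const uint8_t *data, size_t len)
{
    size_t old_size = *buf ? (*buf)->size : 0;
    int ret = av_buffer_realloc(buf, old_size + len);  /* allocates when *buf is NULL */
    if (ret < 0)
        return ret;
    memcpy((*buf)->data + old_size, data, len);
    return 0;
}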
ff_mb_ptype_vlc
VLCElem ff_mb_ptype_vlc[64]
Definition: mpeg12.c:133
AVCodecContext::debug
int debug
debug
Definition: avcodec.h:1374
FF_DISABLE_DEPRECATION_WARNINGS
#define FF_DISABLE_DEPRECATION_WARNINGS
Definition: internal.h:72
AVCodecContext::coded_width
int coded_width
Bitstream width / height, may be different from width/height e.g.
Definition: avcodec.h:607
AV_PICTURE_TYPE_P
@ AV_PICTURE_TYPE_P
Predicted.
Definition: avutil.h:279
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:200
mpeg_decode_end
static av_cold int mpeg_decode_end(AVCodecContext *avctx)
Definition: mpeg12dec.c:2634
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:77
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
ff_mpv_reconstruct_mb
void ff_mpv_reconstruct_mb(MPVContext *s, int16_t block[][64])
Definition: mpegvideo_dec.c:1124
IDCTDSPContext::idct_permutation
uint8_t idct_permutation[64]
IDCT input permutation.
Definition: idctdsp.h:86
ff_ipu_decoder
const FFCodec ff_ipu_decoder
Definition: mpeg12dec.c:2885
av_stereo3d_create_side_data
AVStereo3D * av_stereo3d_create_side_data(AVFrame *frame)
Allocate a complete AVFrameSideData and add it to the frame.
Definition: stereo3d.c:54
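A minimal sketch of attaching stereo 3D side data to a frame; tag_top_bottom() is an illustrative helper, not decoder code:
#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/stereo3d.h>

static int tag_top_bottom(AVFrame *frame)
{
    AVStereo3D *stereo = av_stereo3d_create_side_data(frame);
    if (!stereo)
        return AVERROR(ENOMEM);
    stereo->type = AV_STEREO3D_TOPBOTTOM;   /* views stacked vertically */
    return 0;
}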
AVFrameSideData
Structure to hold side data for an AVFrame.
Definition: frame.h:282
load_matrix
static int load_matrix(MPVContext *const s, GetBitContext *const gb, uint16_t matrix0[64], uint16_t matrix1[64], int intra)
Definition: mpeg12dec.c:1127
ER_MV_END
#define ER_MV_END
Definition: error_resilience.h:34
MpegEncContext::first_field
int first_field
is 1 for the first field of a field picture 0 otherwise
Definition: mpegvideo.h:272
MpegEncContext::q_scale_type
int q_scale_type
Definition: mpegvideo.h:257
AVCodecContext::codec_tag
unsigned int codec_tag
fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A').
Definition: avcodec.h:456
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
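The rounding idea behind FFALIGN, shown with a self-contained demo macro (ALIGN_UP is illustrative, not the FFmpeg definition):
#include <stdio.h>

#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((a) - 1))   /* a must be a power of two */

int main(void)
{
    /* Pad a 1918-pixel coded width to 16-pixel macroblock granularity. */
    printf("%d\n", ALIGN_UP(1918, 16));   /* 1920 */
    return 0;
}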
MV_DIR_FORWARD
#define MV_DIR_FORWARD
Definition: mpegvideo.h:170
ff_tlog
#define ff_tlog(a,...)
Definition: tableprint_vlc.h:29
Mpeg12SliceContext::DECLARE_ALIGNED_32
DECLARE_ALIGNED_32(int16_t, block)[12][64]
AVPacket
This structure stores compressed data.
Definition: packet.h:565
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:458
ScanTable::permutated
uint8_t permutated[64]
Definition: mpegvideo.h:48
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:86
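A small example of setting and reading back a dictionary entry; the "timecode" key is only illustrative of how frame metadata can be populated:
#include <stdio.h>
#include <libavutil/dict.h>

int main(void)
{
    AVDictionary *meta = NULL;
    if (av_dict_set(&meta, "timecode", "00:00:01:12", 0) < 0)
        return 1;
    printf("timecode=%s\n", av_dict_get(meta, "timecode", NULL, 0)->value);
    av_dict_free(&meta);
    return 0;
}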
mpeg_decode_picture_coding_extension
static int mpeg_decode_picture_coding_extension(Mpeg1Context *const s1, GetBitContext *const gb)
Definition: mpeg12dec.c:1165
HWACCEL_VAAPI
#define HWACCEL_VAAPI(codec)
Definition: hwconfig.h:70
mpeg_er.h
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:592
int32_t
int32_t
Definition: audioconvert.c:56
imgutils.h
mpeg_decode_picture_display_extension
static void mpeg_decode_picture_display_extension(Mpeg1Context *const s1, GetBitContext *const gb)
Definition: mpeg12dec.c:1092
AVCodecContext::properties
attribute_deprecated unsigned properties
Properties of the stream that gets decoded.
Definition: avcodec.h:1637
AV_CODEC_CAP_DRAW_HORIZ_BAND
#define AV_CODEC_CAP_DRAW_HORIZ_BAND
Decoder can use draw_horiz_band callback.
Definition: codec.h:44
AVStereo3DType
AVStereo3DType
List of possible 3D Types.
Definition: stereo3d.h:48
mpeg2_decode_block_intra
static int mpeg2_decode_block_intra(Mpeg12SliceContext *const s, int16_t *block, int n)
Definition: mpeg12dec.c:311
block
A block, as referenced in the libavfilter design documentation (doc/filter_design.txt).
Definition: filter_design.txt:207
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
Mpeg1Context::frame_rate_ext
AVRational frame_rate_ext
Definition: mpeg12dec.c:93
AVERROR_INVALIDDATA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:61
AVPanScan::height
int height
Definition: defs.h:267
MKTAG
#define MKTAG(a, b, c, d)
Definition: macros.h:55
ff_mb_btype_vlc
VLCElem ff_mb_btype_vlc[64]
Definition: mpeg12.c:134
MpegEncContext::resync_mb_y
int resync_mb_y
y position of last resync marker
Definition: mpegvideo.h:210
mpeg_decode_user_data
static void mpeg_decode_user_data(AVCodecContext *avctx, const uint8_t *p, int buf_size)
Definition: mpeg12dec.c:2108
h
h
Definition: vp9dsp_template.c:2070
MpegEncContext::end_mb_y
int end_mb_y
end mb_y of this thread (so current thread should process start_mb_y <= row < end_mb_y)
Definition: mpegvideo.h:110
Mpeg2ClosedCaptionsFormat
Mpeg2ClosedCaptionsFormat
Definition: mpeg12dec.c:64
ER_AC_END
#define ER_AC_END
Definition: error_resilience.h:32
AVStereo3D
Stereo 3D type: this structure describes how two videos are packed within a single video surface,...
Definition: stereo3d.h:203
av_image_check_sar
int av_image_check_sar(unsigned int w, unsigned int h, AVRational sar)
Check if the given sample aspect ratio of an image is valid.
Definition: imgutils.c:323
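A short example of validating a sample aspect ratio before applying it; the 720x576 / 64:45 pair is just a common anamorphic case:
#include <stdio.h>
#include <libavutil/imgutils.h>

int main(void)
{
    AVRational sar = { 64, 45 };
    /* Returns 0 when the SAR is sane for the given dimensions,
     * a negative AVERROR code otherwise. */
    if (av_image_check_sar(720, 576, sar) < 0)
        printf("invalid sample aspect ratio\n");
    else
        printf("sample aspect ratio ok\n");
    return 0;
}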
MV_VLC_BITS
#define MV_VLC_BITS
Definition: mpeg12vlc.h:34
Mpeg1Context::timecode_frame_start
int64_t timecode_frame_start
Definition: mpeg12dec.c:102
width
#define width
Definition: dsp.h:89
MpegEncContext::start_mb_y
int start_mb_y
start mb_y of this thread (so current thread should process start_mb_y <= row < end_mb_y)
Definition: mpegvideo.h:109
AVDISCARD_NONREF
@ AVDISCARD_NONREF
discard all non reference
Definition: defs.h:228
MpegEncContext::alternate_scan
int alternate_scan
Definition: mpegvideo.h:259
DECODE_SLICE_OK
#define DECODE_SLICE_OK
Definition: mpeg12dec.c:1351
AV_CODEC_ID_MPEG2VIDEO
@ AV_CODEC_ID_MPEG2VIDEO
preferred ID for MPEG-1/2 video decoding
Definition: codec_id.h:54
DECODE_SLICE_ERROR
#define DECODE_SLICE_ERROR
Definition: mpeg12dec.c:1350
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Special option type for declaring named constants.
Definition: opt.h:299
MpegEncContext
MpegEncContext.
Definition: mpegvideo.h:63
MpegEncContext::codec_id
enum AVCodecID codec_id
Definition: mpegvideo.h:90
AVCodecContext::sample_aspect_ratio
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel.
Definition: avcodec.h:616
MB_TYPE_FORWARD_MV
#define MB_TYPE_FORWARD_MV
Definition: mpegutils.h:49
decode_dc
static int decode_dc(GetBitContext *gb, int component)
Definition: mpeg12dec.h:30
Mpeg1Context::afd
uint8_t afd
Definition: mpeg12dec.c:88
Mpeg1Context
Definition: mpeg12dec.c:81
MpegEncContext::chroma_intra_matrix
uint16_t chroma_intra_matrix[64]
Definition: mpegvideo.h:204
Mpeg1Context::extradata_decoded
int extradata_decoded
Definition: mpeg12dec.c:99
get_dmv
static int get_dmv(Mpeg12SliceContext *const s)
Definition: mpeg12dec.c:392
MB_TYPE_INTRA
#define MB_TYPE_INTRA
Definition: mpegutils.h:64
MBINCR_VLC_BITS
#define MBINCR_VLC_BITS
Definition: mpeg12vlc.h:37
MpegEncContext::chroma_format
int chroma_format
Definition: mpegvideo.h:262
MpegEncContext::codec_tag
int codec_tag
internal codec_tag upper case converted from avctx codec_tag
Definition: mpegvideo.h:93