FFmpeg
amfdec.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/hwcontext_amf.h"
#include "libavutil/hwcontext_amf_internal.h"
#include "amfdec.h"
#include "codec_internal.h"
#include "hwconfig.h"
#include "libavutil/imgutils.h"
#include "libavutil/mem.h"
#include "libavutil/time.h"
#include "decode.h"
#include "decode_bsf.h"
#include "libavutil/mastering_display_metadata.h"

#if CONFIG_D3D11VA
#include "libavutil/hwcontext_d3d11va.h"
#endif
#if CONFIG_DXVA2
#define COBJMACROS
#include "libavutil/hwcontext_dxva2.h"
#endif

#ifdef _WIN32
#include "compat/w32dlfcn.h"
#else
#include <dlfcn.h>
#endif
// will be in public headers soon
#define AMF_VIDEO_DECODER_OUTPUT_FORMAT L"OutputDecodeFormat"

enum AVPixelFormat amf_dec_pix_fmts[] = {
    AV_PIX_FMT_NV12,
    AV_PIX_FMT_P010,
    AV_PIX_FMT_P012,
    AV_PIX_FMT_AMF_SURFACE,
    AV_PIX_FMT_NONE
};

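/*
 * Hardware configuration advertised to avcodec_get_hw_config(): decoding runs
 * on an AV_HWDEVICE_TYPE_AMF device and produces AV_PIX_FMT_AMF_SURFACE
 * frames; no separate hwaccel implementation is involved.
 */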
static const AVCodecHWConfigInternal *const amf_hw_configs[] = {
    &(const AVCodecHWConfigInternal) {
        .public = {
            .pix_fmt     = AV_PIX_FMT_AMF_SURFACE,
            .methods     = AV_CODEC_HW_CONFIG_METHOD_HW_FRAMES_CTX |
                           AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX,
            .device_type = AV_HWDEVICE_TYPE_AMF,
        },
        .hwaccel = NULL,
    },
    NULL
};

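/* AVBuffer free callback: drops the reference on the wrapped AMFSurface. */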
static void amf_free_amfsurface(void *opaque, uint8_t *data)
{
    AMFSurface *surface = (AMFSurface*)(data);
    surface->pVtbl->Release(surface);
}

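/*
 * AMF runtimes older than 1.4.36 cannot detect the bitstream bit depth on
 * their own; in that case the 10-bit decoder variant and output format have
 * to be chosen from container information instead.
 */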
static int amf_legacy_driver_no_bitness_detect(AVAMFDeviceContext *amf_device_ctx)
{
    if (AMF_GET_MAJOR_VERSION(amf_device_ctx->version) <= 1 &&
        AMF_GET_MINOR_VERSION(amf_device_ctx->version) <= 4 &&
        AMF_GET_SUBMINOR_VERSION(amf_device_ctx->version) < 36)
        return 1;
    return 0;
}

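/*
 * Creates and configures the AMF decoder component for the codec selected on
 * avctx: picks the AMF codec id (falling back to the 10-bit HEVC/VP9 variants
 * on legacy drivers), forwards color metadata and user options, passes the
 * extradata, sizes the surface pool and calls Init().
 */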
static int amf_init_decoder(AVCodecContext *avctx)
{
    AMFDecoderContext *ctx = avctx->priv_data;
    AVHWDeviceContext *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
    AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext*)hw_device_ctx->hwctx;
    const wchar_t *codec_id = NULL;
    AMF_RESULT res;
    AMFBuffer *buffer;
    amf_int64 color_profile;
    int pool_size = 36;
    // workaround for older drivers that don't support dynamic bitness detection -
    // choose the 10-bit HEVC and VP9 decoders based on container info
    int no_bitness_detect = amf_legacy_driver_no_bitness_detect(amf_device_ctx);

    ctx->drain = 0;
    ctx->resolution_changed = 0;

    switch (avctx->codec->id) {
    case AV_CODEC_ID_H264:
        codec_id = AMFVideoDecoderUVD_H264_AVC;
        break;
    case AV_CODEC_ID_HEVC: {
        codec_id = AMFVideoDecoderHW_H265_HEVC;
        if (no_bitness_detect) {
            if (avctx->pix_fmt == AV_PIX_FMT_YUV420P10)
                codec_id = AMFVideoDecoderHW_H265_MAIN10;
        }
    } break;
    case AV_CODEC_ID_VP9: {
        codec_id = AMFVideoDecoderHW_VP9;
        if (no_bitness_detect) {
            if (avctx->pix_fmt == AV_PIX_FMT_YUV420P10)
                codec_id = AMFVideoDecoderHW_VP9_10BIT;
        }
    } break;
    case AV_CODEC_ID_AV1:
        codec_id = AMFVideoDecoderHW_AV1;
        break;
    default:
        break;
    }
    AMF_RETURN_IF_FALSE(ctx, codec_id != NULL, AVERROR(EINVAL), "Codec %d is not supported\n", avctx->codec->id);

    res = amf_device_ctx->factory->pVtbl->CreateComponent(amf_device_ctx->factory, amf_device_ctx->context, codec_id, &ctx->decoder);
    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_ENCODER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", codec_id, res);

    // Color Metadata
    /// Color Range (Support for older Drivers)
    if (avctx->color_range == AVCOL_RANGE_JPEG) {
        AMF_ASSIGN_PROPERTY_BOOL(res, ctx->decoder, AMF_VIDEO_DECODER_FULL_RANGE_COLOR, 1);
    } else if (avctx->color_range != AVCOL_RANGE_UNSPECIFIED) {
        AMF_ASSIGN_PROPERTY_BOOL(res, ctx->decoder, AMF_VIDEO_DECODER_FULL_RANGE_COLOR, 0);
    }
    color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN;
    switch (avctx->colorspace) {
    case AVCOL_SPC_SMPTE170M:
        if (avctx->color_range == AVCOL_RANGE_JPEG) {
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_601;
        } else {
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_601;
        }
        break;
    case AVCOL_SPC_BT709:
        if (avctx->color_range == AVCOL_RANGE_JPEG) {
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_709;
        } else {
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_709;
        }
        break;
    case AVCOL_SPC_BT2020_NCL:
    case AVCOL_SPC_BT2020_CL:
        if (avctx->color_range == AVCOL_RANGE_JPEG) {
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_2020;
        } else {
            color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020;
        }
        break;
    }
    if (color_profile != AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN)
        AMF_ASSIGN_PROPERTY_INT64(res, ctx->decoder, AMF_VIDEO_DECODER_COLOR_PROFILE, color_profile);
    if (avctx->color_trc != AVCOL_TRC_UNSPECIFIED)
        AMF_ASSIGN_PROPERTY_INT64(res, ctx->decoder, AMF_VIDEO_DECODER_COLOR_TRANSFER_CHARACTERISTIC, (amf_int64)avctx->color_trc);
    if (avctx->color_primaries != AVCOL_PRI_UNSPECIFIED)
        AMF_ASSIGN_PROPERTY_INT64(res, ctx->decoder, AMF_VIDEO_DECODER_COLOR_PRIMARIES, (amf_int64)avctx->color_primaries);

    if (ctx->timestamp_mode != -1)
        AMF_ASSIGN_PROPERTY_INT64(res, ctx->decoder, AMF_TIMESTAMP_MODE, ctx->timestamp_mode);
    if (ctx->decoder_mode != -1)
        AMF_ASSIGN_PROPERTY_INT64(res, ctx->decoder, AMF_VIDEO_DECODER_REORDER_MODE, ctx->decoder_mode);
    if (ctx->dpb_size != -1)
        AMF_ASSIGN_PROPERTY_INT64(res, ctx->decoder, AMF_VIDEO_DECODER_DPB_SIZE, ctx->dpb_size);
    if (ctx->lowlatency != -1)
        AMF_ASSIGN_PROPERTY_INT64(res, ctx->decoder, AMF_VIDEO_DECODER_LOW_LATENCY, ctx->lowlatency);
    if (ctx->smart_access_video != -1) {
        AMF_ASSIGN_PROPERTY_INT64(res, ctx->decoder, AMF_VIDEO_DECODER_ENABLE_SMART_ACCESS_VIDEO, ctx->smart_access_video != 0);
        if (res != AMF_OK) {
            av_log(avctx, AV_LOG_ERROR, "Smart Access Video is not supported by the AMF decoder.\n");
            return AVERROR(EINVAL);
        } else {
            av_log(avctx, AV_LOG_INFO, "Smart Access Video (%d) is set.\n", ctx->smart_access_video);
            // Set low latency mode if Smart Access Video is enabled
            if (ctx->smart_access_video != 0) {
                AMF_ASSIGN_PROPERTY_INT64(res, ctx->decoder, AMF_VIDEO_DECODER_LOW_LATENCY, true);
                av_log(avctx, AV_LOG_INFO, "Smart Access Video enabled low latency mode for the decoder.\n");
            }
        }
    }
    if (ctx->skip_transfer_sav != -1)
        AMF_ASSIGN_PROPERTY_INT64(res, ctx->decoder, AMF_VIDEO_DECODER_SKIP_TRANSFER_SMART_ACCESS_VIDEO, ctx->skip_transfer_sav);

    if (ctx->copy_output != -1)
        AMF_ASSIGN_PROPERTY_INT64(res, ctx->decoder, AMF_VIDEO_DECODER_SURFACE_COPY, ctx->copy_output);

    if (avctx->extradata_size) {
        const uint8_t *extradata;
        int extradata_size;
        ff_decode_get_extradata(avctx, &extradata, &extradata_size);
        res = amf_device_ctx->context->pVtbl->AllocBuffer(amf_device_ctx->context, AMF_MEMORY_HOST, extradata_size, &buffer);
        if (res == AMF_OK) {
            memcpy(buffer->pVtbl->GetNative(buffer), extradata, extradata_size);
            AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->decoder, AMF_VIDEO_DECODER_EXTRADATA, buffer);
            buffer->pVtbl->Release(buffer);
            buffer = NULL;
        }
    }
    if (ctx->surface_pool_size == -1) {
        ctx->surface_pool_size = pool_size;
        if (avctx->extra_hw_frames > 0)
            ctx->surface_pool_size += avctx->extra_hw_frames;
        if (avctx->active_thread_type & FF_THREAD_FRAME)
            ctx->surface_pool_size += avctx->thread_count;
    }

    // AMF currently restricts the surface pool to 100 surfaces;
    // remove this clamp once the restriction is lifted.
    if (ctx->surface_pool_size > 100)
        ctx->surface_pool_size = 100;

    AMF_ASSIGN_PROPERTY_INT64(res, ctx->decoder, AMF_VIDEO_DECODER_SURFACE_POOL_SIZE, ctx->surface_pool_size);
    res = ctx->decoder->pVtbl->Init(ctx->decoder, AMF_SURFACE_UNKNOWN, avctx->width, avctx->height);
    if (res != AMF_OK) {
        av_log(avctx, AV_LOG_ERROR, "Decoder initialization failed with error %d\n", res);
        return AVERROR(EINVAL);
    }
    return 0;
}

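/* Terminates and releases the AMF decoder component and drops the device and packet references. */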
static int amf_decode_close(AVCodecContext *avctx)
{
    AMFDecoderContext *ctx = avctx->priv_data;

    if (ctx->decoder) {
        ctx->decoder->pVtbl->Terminate(ctx->decoder);
        ctx->decoder->pVtbl->Release(ctx->decoder);
        ctx->decoder = NULL;
    }

    av_buffer_unref(&ctx->device_ctx_ref);
    av_packet_free(&ctx->in_pkt);

    return 0;
}

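/*
 * (Re)configures avctx->hw_frames_ctx for AMF output: sets the surface
 * dimensions, AV_PIX_FMT_AMF_SURFACE format, software format and pool size,
 * then initializes the frame pool. A no-op unless an AMF device is in use.
 */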
static int amf_init_frames_context(AVCodecContext *avctx, int sw_format, int new_width, int new_height)
{
    int ret;
    AVHWDeviceContext *hwdev_ctx;
    AVHWFramesContext *hwframes_ctx;
    AMFDecoderContext *ctx;
    if (!avctx->hw_frames_ctx || !avctx->hw_device_ctx)
        return 0;
    hwdev_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
    hwframes_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
    ctx = avctx->priv_data;

    if (hwdev_ctx->type != AV_HWDEVICE_TYPE_AMF)
        return 0;

    hwframes_ctx->width = new_width;
    hwframes_ctx->height = new_height;
    hwframes_ctx->format = AV_PIX_FMT_AMF_SURFACE;
    hwframes_ctx->sw_format = sw_format;
    hwframes_ctx->initial_pool_size = ctx->surface_pool_size + 8;

    ret = av_hwframe_ctx_init(avctx->hw_frames_ctx);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "Error initializing an AMF frame pool\n");
        return ret;
    }
    return 0;
}

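/*
 * Decoder init: allocates the internal packet, obtains an AMF device (reusing
 * or deriving from the user-supplied hw_device_ctx, or creating a new one),
 * creates the AMF decoder and determines the initial output format, setting
 * up the hardware frames context when a device context is present.
 */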
static int amf_decode_init(AVCodecContext *avctx)
{
    AMFDecoderContext *ctx = avctx->priv_data;
    int ret;
    ctx->in_pkt = av_packet_alloc();
    if (!ctx->in_pkt)
        return AVERROR(ENOMEM);

    if (avctx->hw_device_ctx && !avctx->hw_frames_ctx) {
        AVHWDeviceContext *hwdev_ctx;
        hwdev_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
        if (hwdev_ctx->type == AV_HWDEVICE_TYPE_AMF)
        {
            ctx->device_ctx_ref = av_buffer_ref(avctx->hw_device_ctx);
            avctx->hw_frames_ctx = av_hwframe_ctx_alloc(avctx->hw_device_ctx);

            AMF_GOTO_FAIL_IF_FALSE(avctx, !!avctx->hw_frames_ctx, AVERROR(ENOMEM), "av_hwframe_ctx_alloc failed\n");
        } else {
            ret = av_hwdevice_ctx_create_derived(&ctx->device_ctx_ref, AV_HWDEVICE_TYPE_AMF, avctx->hw_device_ctx, 0);
            AMF_GOTO_FAIL_IF_FALSE(avctx, ret == 0, ret, "Failed to create derived AMF device context: %s\n", av_err2str(ret));
        }
    } else {
        ret = av_hwdevice_ctx_create(&ctx->device_ctx_ref, AV_HWDEVICE_TYPE_AMF, NULL, NULL, 0);
        AMF_GOTO_FAIL_IF_FALSE(avctx, ret == 0, ret, "Failed to create hardware device context (AMF) : %s\n", av_err2str(ret));
    }
    if ((ret = amf_init_decoder(avctx)) == 0) {
        AVHWDeviceContext *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
        AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext*)hw_device_ctx->hwctx;
        enum AVPixelFormat surf_pix_fmt = AV_PIX_FMT_NONE;

        if (amf_legacy_driver_no_bitness_detect(amf_device_ctx)) {
            // legacy drivers cannot detect the bit depth; use the format from the container
            switch (avctx->pix_fmt) {
            case AV_PIX_FMT_YUV420P:
            case AV_PIX_FMT_YUVJ420P:
                surf_pix_fmt = AV_PIX_FMT_NV12; break;
            case AV_PIX_FMT_YUV420P10:
                surf_pix_fmt = AV_PIX_FMT_P010; break;
            }
        } else {
            AMFVariantStruct format_var = {0};

            ret = ctx->decoder->pVtbl->GetProperty(ctx->decoder, AMF_VIDEO_DECODER_OUTPUT_FORMAT, &format_var);
            AMF_GOTO_FAIL_IF_FALSE(avctx, ret == AMF_OK, AVERROR(EINVAL), "Failed to get output format (AMF) : %d\n", ret);

            surf_pix_fmt = av_amf_to_av_format(format_var.int64Value);
        }
        if (avctx->hw_frames_ctx)
        {
            // these values need to be set for avcodec_open2();
            // they are updated after the header is decoded if they turn out to be wrong
            if (surf_pix_fmt == AV_PIX_FMT_NONE)
                surf_pix_fmt = AV_PIX_FMT_NV12; // for older drivers
            if (!avctx->coded_width)
                avctx->coded_width = 1280;
            if (!avctx->coded_height)
                avctx->coded_height = 720;
            ret = amf_init_frames_context(avctx, surf_pix_fmt, avctx->coded_width, avctx->coded_height);
            AMF_GOTO_FAIL_IF_FALSE(avctx, ret == 0, ret, "Failed to init frames context (AMF) : %s\n", av_err2str(ret));
        }
        else
            avctx->pix_fmt = surf_pix_fmt;

        return 0;
    }
fail:
    amf_decode_close(avctx);
    return ret;
}

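/* Reads an interface-typed property from an AMF object and returns it as an AMFBuffer. */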
static AMF_RESULT amf_get_property_buffer(AMFData *object, const wchar_t *name, AMFBuffer **val)
{
    AMF_RESULT res;
    AMFVariantStruct var;
    res = AMFVariantInit(&var);
    if (res == AMF_OK) {
        res = object->pVtbl->GetProperty(object, name, &var);
        if (res == AMF_OK) {
            if (var.type == AMF_VARIANT_INTERFACE) {
                AMFGuid guid_AMFBuffer = IID_AMFBuffer();
                AMFInterface *amf_interface = AMFVariantInterface(&var);
                res = amf_interface->pVtbl->QueryInterface(amf_interface, &guid_AMFBuffer, (void**)val);
            } else {
                res = AMF_INVALID_DATA_TYPE;
            }
        }
        AMFVariantClear(&var);
    }
    return res;
}

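/*
 * Wraps a decoded AMFSurface in an AVFrame. With an AMF device context the
 * surface is passed through as AV_PIX_FMT_AMF_SURFACE; otherwise it is
 * converted to host memory and its planes are mapped directly. Timestamps,
 * color properties and HDR metadata (mastering display and content light
 * level) are copied onto the frame as well.
 */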
static int amf_amfsurface_to_avframe(AVCodecContext *avctx, AMFSurface* surface, AVFrame *frame)
{
    AMFVariantStruct var = {0};
    AMFPlane *plane;
    int i;
    int ret;
    int format_amf;

    if (avctx->hw_device_ctx && ((AVHWDeviceContext*)avctx->hw_device_ctx->data)->type == AV_HWDEVICE_TYPE_AMF) {
        // prepare frame similar to ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF);

        ret = ff_decode_frame_props(avctx, frame);
        if (ret < 0)
            return ret;

        avctx->sw_pix_fmt = avctx->pix_fmt;

        ret = ff_attach_decode_data(frame);
        if (ret < 0)
            return ret;
        frame->width = avctx->width;
        frame->height = avctx->height;

        // hand ownership of the surface over to the frame buffer
        frame->buf[0] = av_buffer_create((uint8_t *)surface, sizeof(surface),
                                         amf_free_amfsurface, (void*)avctx,
                                         AV_BUFFER_FLAG_READONLY);
        AMF_RETURN_IF_FALSE(avctx, !!frame->buf[0], AVERROR(ENOMEM), "av_buffer_create for amf surface failed.");

        frame->data[0] = (uint8_t *)surface;
        frame->format = AV_PIX_FMT_AMF_SURFACE;
        format_amf = surface->pVtbl->GetFormat(surface);
        avctx->sw_pix_fmt = av_amf_to_av_format(format_amf);
        frame->hw_frames_ctx = av_buffer_ref(avctx->hw_frames_ctx);
    } else {
        ret = surface->pVtbl->Convert(surface, AMF_MEMORY_HOST);
        AMF_RETURN_IF_FALSE(avctx, ret == AMF_OK, AVERROR_UNKNOWN, "Convert(amf::AMF_MEMORY_HOST) failed with error %d\n", ret);

        for (i = 0; i < surface->pVtbl->GetPlanesCount(surface); i++) {
            plane = surface->pVtbl->GetPlaneAt(surface, i);
            frame->data[i] = plane->pVtbl->GetNative(plane);
            frame->linesize[i] = plane->pVtbl->GetHPitch(plane);
        }

        frame->buf[0] = av_buffer_create((uint8_t *)surface, sizeof(surface),
                                         amf_free_amfsurface, (void*)avctx,
                                         AV_BUFFER_FLAG_READONLY);
        AMF_RETURN_IF_FALSE(avctx, !!frame->buf[0], AVERROR(ENOMEM), "av_buffer_create for amf surface failed.");

        format_amf = surface->pVtbl->GetFormat(surface);
        frame->format = av_amf_to_av_format(format_amf);
    }

    frame->width = avctx->width;
    frame->height = avctx->height;

    frame->pts = surface->pVtbl->GetPts(surface);

    surface->pVtbl->GetProperty(surface, L"FFMPEG:dts", &var);
    frame->pkt_dts = var.int64Value;

    frame->duration = surface->pVtbl->GetDuration(surface);
    if (frame->duration < 0)
        frame->duration = 0;

    frame->color_range = avctx->color_range;
    frame->colorspace = avctx->colorspace;
    frame->color_trc = avctx->color_trc;
    frame->color_primaries = avctx->color_primaries;

    if (frame->color_trc == AVCOL_TRC_SMPTE2084) {
        AMFBuffer *hdrmeta_buffer = NULL;
        ret = amf_get_property_buffer((AMFData *)surface, AMF_VIDEO_DECODER_HDR_METADATA, &hdrmeta_buffer);
        if (hdrmeta_buffer != NULL) {
            AMFHDRMetadata *hdrmeta = (AMFHDRMetadata*)hdrmeta_buffer->pVtbl->GetNative(hdrmeta_buffer);
            if (ret != AMF_OK)
                return ret;
            if (hdrmeta != NULL) {
                AVMasteringDisplayMetadata *mastering = av_mastering_display_metadata_create_side_data(frame);
                const int chroma_den = 50000;
                const int luma_den = 10000;

                if (!mastering)
                    return AVERROR(ENOMEM);

                mastering->display_primaries[0][0] = av_make_q(hdrmeta->redPrimary[0], chroma_den);
                mastering->display_primaries[0][1] = av_make_q(hdrmeta->redPrimary[1], chroma_den);

                mastering->display_primaries[1][0] = av_make_q(hdrmeta->greenPrimary[0], chroma_den);
                mastering->display_primaries[1][1] = av_make_q(hdrmeta->greenPrimary[1], chroma_den);

                mastering->display_primaries[2][0] = av_make_q(hdrmeta->bluePrimary[0], chroma_den);
                mastering->display_primaries[2][1] = av_make_q(hdrmeta->bluePrimary[1], chroma_den);

                mastering->white_point[0] = av_make_q(hdrmeta->whitePoint[0], chroma_den);
                mastering->white_point[1] = av_make_q(hdrmeta->whitePoint[1], chroma_den);

                mastering->max_luminance = av_make_q(hdrmeta->maxMasteringLuminance, luma_den);
                mastering->min_luminance = av_make_q(hdrmeta->minMasteringLuminance, luma_den);

                mastering->has_luminance = 1;
                mastering->has_primaries = 1;
                if (hdrmeta->maxContentLightLevel) {
                    AVContentLightMetadata *light = av_content_light_metadata_create_side_data(frame);

                    if (!light)
                        return AVERROR(ENOMEM);

                    light->MaxCLL = hdrmeta->maxContentLightLevel;
                    light->MaxFALL = hdrmeta->maxFrameAverageLightLevel;
                }
            }
        }
    }
    return 0;
}

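/*
 * Pulls one decoded surface from the AMF decoder, if available, and converts
 * it to an AVFrame. Returns AMF_REPEAT when no output is ready yet.
 */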
static AMF_RESULT amf_receive_frame(AVCodecContext *avctx, AVFrame *frame)
{
    AMFDecoderContext *ctx = avctx->priv_data;
    AMF_RESULT ret = AMF_OK;
    AMFSurface *surface = NULL;
    AMFData *data_out = NULL;

    ret = ctx->decoder->pVtbl->QueryOutput(ctx->decoder, &data_out);
    if (ret != AMF_OK && ret != AMF_REPEAT) {
        return ret;
    }
    if (data_out == NULL) {
        return AMF_REPEAT;
    }

    if (data_out) {
        AMFGuid guid = IID_AMFSurface();
        data_out->pVtbl->QueryInterface(data_out, &guid, (void**)&surface); // query for surface interface
        data_out->pVtbl->Release(data_out);
        data_out = NULL;
    }

    ret = amf_amfsurface_to_avframe(avctx, surface, frame);
    AMF_GOTO_FAIL_IF_FALSE(avctx, ret >= 0, AMF_FAIL, "Failed to convert AMFSurface to AVFrame = %d\n", ret);
    return AMF_OK;
fail:

    if (surface) {
        surface->pVtbl->Release(surface);
        surface = NULL;
    }
    return ret;
}

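/* Copies pts/dts/duration from the AVPacket onto the AMF input buffer. */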
static AMF_RESULT amf_update_buffer_properties(AVCodecContext *avctx, AMFBuffer* buffer, const AVPacket* pkt)
{
    AMF_RESULT res;

    AMF_RETURN_IF_FALSE(avctx, buffer != NULL, AMF_INVALID_ARG, "update_buffer_properties() - buffer not passed in");
    AMF_RETURN_IF_FALSE(avctx, pkt != NULL, AMF_INVALID_ARG, "update_buffer_properties() - packet not passed in");
    buffer->pVtbl->SetPts(buffer, pkt->pts);
    buffer->pVtbl->SetDuration(buffer, pkt->duration);
    AMF_ASSIGN_PROPERTY_INT64(res, buffer, L"FFMPEG:dts", pkt->dts);
    if (res != AMF_OK)
        av_log(avctx, AV_LOG_VERBOSE, "Failed to assign dts value.");
    return AMF_OK;
}

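/*
 * Copies the packet payload into a newly allocated host-memory AMFBuffer
 * (padded with AV_INPUT_BUFFER_PADDING_SIZE zero bytes) and attaches the
 * packet timestamps to it.
 */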
static AMF_RESULT amf_buffer_from_packet(AVCodecContext *avctx, const AVPacket* pkt, AMFBuffer** buffer)
{
    AMFDecoderContext *ctx = avctx->priv_data;
    AVHWDeviceContext *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
    AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext *)hw_device_ctx->hwctx;
    AMFContext *ctxt = amf_device_ctx->context;
    void *mem;
    AMF_RESULT err;
    AMFBuffer *buf = NULL;

    AMF_RETURN_IF_FALSE(ctxt, pkt != NULL, AMF_INVALID_ARG, "amf_buffer_from_packet() - packet not passed in");
    AMF_RETURN_IF_FALSE(ctxt, buffer != NULL, AMF_INVALID_ARG, "amf_buffer_from_packet() - buffer pointer not passed in");

    err = ctxt->pVtbl->AllocBuffer(ctxt, AMF_MEMORY_HOST, pkt->size + AV_INPUT_BUFFER_PADDING_SIZE, buffer);
    AMF_RETURN_IF_FALSE(ctxt, err == AMF_OK, err, "amf_buffer_from_packet() - AllocBuffer failed");
    buf = *buffer;
    err = buf->pVtbl->SetSize(buf, pkt->size);
    AMF_RETURN_IF_FALSE(ctxt, err == AMF_OK, err, "amf_buffer_from_packet() - SetSize failed");
    // get the memory location and check the buffer was indeed allocated
    mem = buf->pVtbl->GetNative(buf);
    AMF_RETURN_IF_FALSE(ctxt, mem != NULL, AMF_INVALID_POINTER, "amf_buffer_from_packet() - GetNative failed");

    // copy the packet memory and clear the data padding
    memcpy(mem, pkt->data, pkt->size);
    memset((amf_int8*)(mem) + pkt->size, 0, AV_INPUT_BUFFER_PADDING_SIZE);

    return amf_update_buffer_properties(avctx, buf, pkt);
}

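/*
 * receive_frame callback: fetches a packet (unless draining), submits it to
 * the AMF decoder (waiting while no free surfaces are available), then tries
 * to pull a decoded frame. Handles EOF drain as well as mid-stream resolution
 * changes, which trigger an internal drain followed by decoder ReInit().
 */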
static int amf_decode_frame(AVCodecContext *avctx, struct AVFrame *frame)
{
    AMFDecoderContext *ctx = avctx->priv_data;
    AMFBuffer *buf;
    AMF_RESULT res;
    int got_frame = 0;
    AVPacket *avpkt = ctx->in_pkt;

    if (!ctx->decoder)
        return AVERROR(EINVAL);

    // get a packet if needed
    if (!ctx->drain) {
        if (ctx->resolution_changed)
            ctx->resolution_changed = 0;
        else {
            int ret;
            av_packet_unref(avpkt);
            ret = ff_decode_get_packet(avctx, avpkt);
            if (ret < 0 && ret != AVERROR_EOF)
                return ret;
            if (ret == AVERROR_EOF) {
                // nothing to consume, start external drain
                ctx->decoder->pVtbl->Drain(ctx->decoder);
                ctx->drain = 1;
            }
        }
    }

    if (!ctx->drain) {
        // submit frame
        res = amf_buffer_from_packet(avctx, avpkt, &buf);
        AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, 0, "Cannot convert AVPacket to AMFBuffer");
        do {
            res = ctx->decoder->pVtbl->SubmitInput(ctx->decoder, (AMFData*) buf);
            if (res == AMF_DECODER_NO_FREE_SURFACES)
            {
                av_usleep(100);
            }
        } while (res == AMF_DECODER_NO_FREE_SURFACES);

        buf->pVtbl->Release(buf);

        if (res == AMF_DECODER_NO_FREE_SURFACES) {
            // input is not consumed, need to QueryOutput and submit again
            av_log(avctx, AV_LOG_VERBOSE, "SubmitInput() returned NO_FREE_SURFACES and came out of loop - should never happen\n");
            res = AMF_OK;
        } else if (res == AMF_RESOLUTION_CHANGED) {
            // input is not consumed, start internal drain
            ctx->decoder->pVtbl->Drain(ctx->decoder);
            ctx->drain = 1;
            // process resolution_changed when internal drain is complete
            ctx->resolution_changed = 1;
            res = AMF_OK;
        } else if (res != AMF_OK && res != AMF_NEED_MORE_INPUT && res != AMF_REPEAT) {
            av_log(avctx, AV_LOG_ERROR, "SubmitInput() returned error %d\n", res);
            return AVERROR(EINVAL);
        }
    }

    res = amf_receive_frame(avctx, frame);
    if (res == AMF_OK)
        got_frame = 1;
    else if (res == AMF_REPEAT)
        // decoder has no output yet
        res = AMF_OK;
    else if (res == AMF_EOF) {
        // drain is complete
        ctx->drain = 0;
        if (ctx->resolution_changed) {
            // re-initialize the decoder with the new stream parameters
            AMFVariantStruct size_var = {0};
            AMFVariantStruct format_var = {0};
            res = ctx->decoder->pVtbl->GetProperty(ctx->decoder, AMF_VIDEO_DECODER_CURRENT_SIZE, &size_var);
            if (res != AMF_OK) {
                return AVERROR(EINVAL);
            }

            avctx->width = size_var.sizeValue.width;
            avctx->height = size_var.sizeValue.height;
            avctx->coded_width = size_var.sizeValue.width;
            avctx->coded_height = size_var.sizeValue.height;
            res = ctx->decoder->pVtbl->ReInit(ctx->decoder, avctx->width, avctx->height);
            if (res != AMF_OK) {
                av_log(avctx, AV_LOG_ERROR, "ReInit() returned %d\n", res);
                return AVERROR(EINVAL);
            }
            res = ctx->decoder->pVtbl->GetProperty(ctx->decoder, AMF_VIDEO_DECODER_OUTPUT_FORMAT, &format_var);
            if (res == AMF_OK) {
                res = amf_init_frames_context(avctx, av_amf_to_av_format(format_var.int64Value), avctx->coded_width, avctx->coded_height);
            }

            if (res < 0)
                return AVERROR(EINVAL);
        } else
            return AVERROR_EOF;
    } else {
        av_log(avctx, AV_LOG_ERROR, "Unknown result from QueryOutput %d\n", res);
    }
    return got_frame ? 0 : AVERROR(EAGAIN);
}

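/* Flush callback: discards any frames still queued inside the AMF decoder. */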
static void amf_decode_flush(AVCodecContext *avctx)
{
    AMFDecoderContext *ctx = avctx->priv_data;
    ctx->decoder->pVtbl->Flush(ctx->decoder);
}

#define OFFSET(x) offsetof(AMFDecoderContext, x)
#define VD AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM

static const AVOption options[] = {
    // Decoder mode
    { "decoder_mode",       "Decoder mode",                                         OFFSET(decoder_mode),       AV_OPT_TYPE_INT,   { .i64 = -1 }, -1, AMF_VIDEO_DECODER_MODE_LOW_LATENCY, VD, "decoder_mode" },
    { "regular",            "DPB delay is based on number of reference frames + 1", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_VIDEO_DECODER_MODE_REGULAR     }, 0, 0, VD, "decoder_mode" },
    { "compliant",          "DPB delay is based on profile - up to 16",             0, AV_OPT_TYPE_CONST, { .i64 = AMF_VIDEO_DECODER_MODE_COMPLIANT   }, 0, 0, VD, "decoder_mode" },
    { "low_latency",        "DPB delay is 0",                                       0, AV_OPT_TYPE_CONST, { .i64 = AMF_VIDEO_DECODER_MODE_LOW_LATENCY }, 0, 0, VD, "decoder_mode" },

    // Timestamp mode
    { "timestamp_mode",     "Timestamp mode",                                       OFFSET(timestamp_mode),     AV_OPT_TYPE_INT,   { .i64 = AMF_TS_SORT }, -1, AMF_TS_DECODE, VD, "timestamp_mode" },
    { "presentation",       "Preserve timestamps from input to output",             0, AV_OPT_TYPE_CONST, { .i64 = AMF_TS_PRESENTATION }, 0, 0, VD, "timestamp_mode" },
    { "sort",               "Resort PTS list",                                      0, AV_OPT_TYPE_CONST, { .i64 = AMF_TS_SORT         }, 0, 0, VD, "timestamp_mode" },
    { "decode",             "Decode order",                                         0, AV_OPT_TYPE_CONST, { .i64 = AMF_TS_DECODE       }, 0, 0, VD, "timestamp_mode" },

    // Reference frame management
    { "surface_pool_size",  "Number of surfaces in the decode pool",                OFFSET(surface_pool_size),  AV_OPT_TYPE_INT,   { .i64 = -1 }, -1, INT_MAX, VD, NULL },
    { "dpb_size",           "Minimum number of surfaces for reordering",            OFFSET(dpb_size),           AV_OPT_TYPE_INT,   { .i64 = -1 }, -1, 32,      VD, NULL },

    { "lowlatency",         "Low latency",                                          OFFSET(lowlatency),         AV_OPT_TYPE_INT,   { .i64 = -1 }, -1, 1, VD, NULL },
    { "smart_access_video", "Smart Access Video",                                   OFFSET(smart_access_video), AV_OPT_TYPE_INT,   { .i64 = -1 }, -1, 1, VD, NULL },
    { "skip_transfer_sav",  "Skip transfer on another GPU when SAV enabled",        OFFSET(skip_transfer_sav),  AV_OPT_TYPE_INT,   { .i64 = -1 }, -1, 1, VD, NULL },
    { "copy_output",        "Copy Output",                                          OFFSET(copy_output),        AV_OPT_TYPE_INT,   { .i64 = -1 }, -1, 1, VD, NULL },

    { NULL }
};

static const AVClass amf_decode_class = {
    .class_name = "amf",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

#define DEFINE_AMF_DECODER(x, X, bsf_name) \
const FFCodec ff_##x##_amf_decoder = { \
    .p.name         = #x "_amf", \
    CODEC_LONG_NAME(#X " AMD AMF video decoder"), \
    .priv_data_size = sizeof(AMFDecoderContext), \
    .p.type         = AVMEDIA_TYPE_VIDEO, \
    .p.id           = AV_CODEC_ID_##X, \
    .init           = amf_decode_init, \
    FF_CODEC_RECEIVE_FRAME_CB(amf_decode_frame), \
    .flush          = amf_decode_flush, \
    .close          = amf_decode_close, \
    .bsfs           = bsf_name, \
    .p.capabilities = AV_CODEC_CAP_HARDWARE | AV_CODEC_CAP_DELAY | AV_CODEC_CAP_AVOID_PROBING, \
    .p.priv_class   = &amf_decode_class, \
    CODEC_PIXFMTS_ARRAY(amf_dec_pix_fmts), \
    .hw_configs     = amf_hw_configs, \
    .p.wrapper_name = "amf", \
    .caps_internal  = FF_CODEC_CAP_NOT_INIT_THREADSAFE, \
}; \

DEFINE_AMF_DECODER(h264, H264, "h264_mp4toannexb")
DEFINE_AMF_DECODER(hevc, HEVC, NULL)
DEFINE_AMF_DECODER(vp9, VP9, NULL)
DEFINE_AMF_DECODER(av1, AV1, NULL)
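
/*
 * Illustrative usage (not part of this file, a sketch assuming a build where
 * these wrappers are enabled): the hardware decoder can be selected
 * explicitly on the ffmpeg command line by naming it before the input, e.g.
 *
 *     ffmpeg -c:v h264_amf -i input.mp4 -f null -
 *
 * Whether decoded frames stay on the GPU as AV_PIX_FMT_AMF_SURFACE or are
 * copied to system memory depends on whether an AMF hw_device_ctx /
 * hw_frames_ctx is supplied through the libavcodec API.
 */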