FFmpeg
vf_amf_common.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
#include "vf_amf_common.h"

#include "libavutil/avassert.h"
#include "avfilter.h"
#include "avfilter_internal.h"
#include "formats.h"
#include "libavutil/mem.h"
#include "libavutil/imgutils.h"
#include "libavutil/hwcontext_amf.h"
#include "libavutil/hwcontext_amf_internal.h"

#include "AMF/components/VideoDecoderUVD.h"
#include "scale_eval.h"
32 
33 #if CONFIG_DXVA2
34 #include <d3d9.h>
35 #endif
36 
37 #if CONFIG_D3D11VA
38 #include <d3d11.h>
39 #endif
40 
42 {
43  AMFFilterContext *ctx = avctx->priv;
44 
45  if (!strcmp(ctx->format_str, "same")) {
46  ctx->format = AV_PIX_FMT_NONE;
47  } else {
48  ctx->format = av_get_pix_fmt(ctx->format_str);
49  if (ctx->format == AV_PIX_FMT_NONE) {
50  av_log(avctx, AV_LOG_ERROR, "Unrecognized pixel format: %s\n", ctx->format_str);
51  return AVERROR(EINVAL);
52  }
53  }
54 
55  return 0;
56 }
57 
59 {
60  AMFFilterContext *ctx = avctx->priv;
61 
62  if (ctx->component) {
63  ctx->component->pVtbl->Terminate(ctx->component);
64  ctx->component->pVtbl->Release(ctx->component);
65  ctx->component = NULL;
66  }
67 
68  if (ctx->master_display)
69  av_freep(&ctx->master_display);
70 
71  if (ctx->light_meta)
72  av_freep(&ctx->light_meta);
73 
74  av_buffer_unref(&ctx->amf_device_ref);
75  av_buffer_unref(&ctx->hwdevice_ref);
76  av_buffer_unref(&ctx->hwframes_in_ref);
77  av_buffer_unref(&ctx->hwframes_out_ref);
78 }
79 
81 {
82  AVFilterContext *avctx = inlink->dst;
83  AMFFilterContext *ctx = avctx->priv;
84  AVFilterLink *outlink = avctx->outputs[0];
85  AMF_RESULT res;
86  AMFSurface *surface_in;
87  AMFSurface *surface_out;
88  AMFData *data_out = NULL;
89  enum AVColorSpace out_colorspace;
90  enum AVColorRange out_color_range;
91 
92  AVFrame *out = NULL;
93  int ret = 0;
94 
95  if (!ctx->component)
96  return AVERROR(EINVAL);
97 
98  ret = amf_avframe_to_amfsurface(avctx, in, &surface_in);
99  if (ret < 0)
100  goto fail;
101 
102  res = ctx->component->pVtbl->SubmitInput(ctx->component, (AMFData*)surface_in);
103  surface_in->pVtbl->Release(surface_in); // release surface after use
104  AMF_GOTO_FAIL_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
105  res = ctx->component->pVtbl->QueryOutput(ctx->component, &data_out);
106  AMF_GOTO_FAIL_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "QueryOutput() failed with error %d\n", res);
107 
108  if (data_out) {
109  AMFGuid guid = IID_AMFSurface();
110  res = data_out->pVtbl->QueryInterface(data_out, &guid, (void**)&surface_out); // query for buffer interface
111  data_out->pVtbl->Release(data_out);
112  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "QueryInterface(IID_AMFSurface) failed with error %d\n", res);
113  } else {
114  return AVERROR(EAGAIN);
115  }
116 
117  out = amf_amfsurface_to_avframe(avctx, surface_out);
118 
119  ret = av_frame_copy_props(out, in);
120  av_frame_unref(in);
121 
122  out_colorspace = AVCOL_SPC_UNSPECIFIED;
123 
124  if (ctx->color_profile != AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN) {
125  switch(ctx->color_profile) {
126  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_601:
127  out_colorspace = AVCOL_SPC_SMPTE170M;
128  break;
129  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_709:
130  out_colorspace = AVCOL_SPC_BT709;
131  break;
132  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020:
133  out_colorspace = AVCOL_SPC_BT2020_NCL;
134  break;
135  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_JPEG:
136  out_colorspace = AVCOL_SPC_RGB;
137  break;
138  default:
139  out_colorspace = AVCOL_SPC_UNSPECIFIED;
140  break;
141  }
142  out->colorspace = out_colorspace;
143  }
144 
145  out_color_range = AVCOL_RANGE_UNSPECIFIED;
146  if (ctx->out_color_range == AMF_COLOR_RANGE_FULL)
147  out_color_range = AVCOL_RANGE_JPEG;
148  else if (ctx->out_color_range == AMF_COLOR_RANGE_STUDIO)
149  out_color_range = AVCOL_RANGE_MPEG;
150 
151  if (ctx->out_color_range != AMF_COLOR_RANGE_UNDEFINED)
152  out->color_range = out_color_range;
153 
154  if (ctx->out_primaries != AMF_COLOR_PRIMARIES_UNDEFINED)
155  out->color_primaries = ctx->out_primaries;
156 
157  if (ctx->out_trc != AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED)
158  out->color_trc = ctx->out_trc;
159 
160 
161  if (ret < 0)
162  goto fail;
163 
164  out->hw_frames_ctx = av_buffer_ref(ctx->hwframes_out_ref);
165  if (!out->hw_frames_ctx) {
166  ret = AVERROR(ENOMEM);
167  goto fail;
168  }
169 
170  av_frame_free(&in);
171  return ff_filter_frame(outlink, out);
172 fail:
173  av_frame_free(&in);
174  av_frame_free(&out);
175  return ret;
176 }
177 
178 
179 
181  const enum AVPixelFormat *input_pix_fmts,
182  const enum AVPixelFormat *output_pix_fmts)
183 {
184  int err;
185  AVFilterFormats *input_formats;
186  AVFilterFormats *output_formats;
187 
188  //in case if hw_device_ctx is set to DXVA2 we change order of pixel formats to set DXVA2 be chosen by default
189  //The order is ignored if hw_frames_ctx is not NULL on the config_output stage
190  if (avctx->hw_device_ctx) {
191  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
192 
193  switch (device_ctx->type) {
194  #if CONFIG_D3D11VA
196  {
197  static const enum AVPixelFormat output_pix_fmts_d3d11[] = {
200  };
201  output_pix_fmts = output_pix_fmts_d3d11;
202  }
203  break;
204  #endif
205  #if CONFIG_DXVA2
207  {
208  static const enum AVPixelFormat output_pix_fmts_dxva2[] = {
211  };
212  output_pix_fmts = output_pix_fmts_dxva2;
213  }
214  break;
215  #endif
217  break;
218  default:
219  {
220  av_log(avctx, AV_LOG_ERROR, "Unsupported device : %s\n", av_hwdevice_get_type_name(device_ctx->type));
221  return AVERROR(EINVAL);
222  }
223  break;
224  }
225  }
226 
227  input_formats = ff_make_pixel_format_list(output_pix_fmts);
228  if (!input_formats) {
229  return AVERROR(ENOMEM);
230  }
231  output_formats = ff_make_pixel_format_list(output_pix_fmts);
232  if (!output_formats) {
233  return AVERROR(ENOMEM);
234  }
235 
236  if ((err = ff_formats_ref(input_formats, &avctx->inputs[0]->outcfg.formats)) < 0)
237  return err;
238 
239  if ((err = ff_formats_ref(output_formats, &avctx->outputs[0]->incfg.formats)) < 0)
240  return err;
241  return 0;
242 }
243 
245  AMFSurface* surface)
246 {
247  AMFPlane *plane;
248  uint8_t *dst_data[4];
249  int dst_linesize[4];
250  int planes;
251  int i;
252 
253  planes = (int)surface->pVtbl->GetPlanesCount(surface);
254  av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
255 
256  for (i = 0; i < planes; i++) {
257  plane = surface->pVtbl->GetPlaneAt(surface, i);
258  dst_data[i] = plane->pVtbl->GetNative(plane);
259  dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
260  }
261  av_image_copy(dst_data, dst_linesize,
262  (const uint8_t**)frame->data, frame->linesize, frame->format,
263  frame->width, frame->height);
264 
265  return 0;
266 }
267 
268 int amf_init_filter_config(AVFilterLink *outlink, enum AVPixelFormat *in_format)
269 {
270  int err;
271  AMF_RESULT res;
272  AVFilterContext *avctx = outlink->src;
273  AVFilterLink *inlink = avctx->inputs[0];
274  AMFFilterContext *ctx = avctx->priv;
275  AVHWFramesContext *hwframes_out;
276  AVHWDeviceContext *hwdev_ctx;
277  enum AVPixelFormat in_sw_format = inlink->format;
278  enum AVPixelFormat out_sw_format = ctx->format;
280  FilterLink *outl = ff_filter_link(outlink);
281  double w_adj = 1.0;
282 
283  if (ctx->w_expr && ctx->h_expr) {
284  if ((err = ff_scale_eval_dimensions(avctx,
285  ctx->w_expr, ctx->h_expr,
286  inlink, outlink,
287  &ctx->width, &ctx->height)) < 0)
288  return err;
289  } else {
290  ctx->width = inlink->w;
291  ctx->height = inlink->h;
292  }
293 
294  if (ctx->reset_sar && inlink->sample_aspect_ratio.num)
295  w_adj = (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den;
296 
297  ff_scale_adjust_dimensions(inlink, &ctx->width, &ctx->height,
298  ctx->force_original_aspect_ratio, ctx->force_divisible_by, w_adj);
299 
300  av_buffer_unref(&ctx->amf_device_ref);
301  av_buffer_unref(&ctx->hwframes_in_ref);
302  av_buffer_unref(&ctx->hwframes_out_ref);
303  ctx->local_context = 0;
304  if (inl->hw_frames_ctx) {
306  if (av_av_to_amf_format(frames_ctx->sw_format) == AMF_SURFACE_UNKNOWN) {
307  av_log(avctx, AV_LOG_ERROR, "Format of input frames context (%s) is not supported by AMF.\n",
308  av_get_pix_fmt_name(frames_ctx->sw_format));
309  return AVERROR(EINVAL);
310  }
311 
312  err = av_hwdevice_ctx_create_derived(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, frames_ctx->device_ref, 0);
313  if (err < 0)
314  return err;
315 
316  ctx->hwframes_in_ref = av_buffer_ref(inl->hw_frames_ctx);
317  if (!ctx->hwframes_in_ref)
318  return AVERROR(ENOMEM);
319 
320  in_sw_format = frames_ctx->sw_format;
321  } else if (avctx->hw_device_ctx) {
322  err = av_hwdevice_ctx_create_derived(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, avctx->hw_device_ctx, 0);
323  if (err < 0)
324  return err;
325  ctx->hwdevice_ref = av_buffer_ref(avctx->hw_device_ctx);
326  if (!ctx->hwdevice_ref)
327  return AVERROR(ENOMEM);
328  } else {
329  res = av_hwdevice_ctx_create(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, NULL, NULL, 0);
330  AMF_RETURN_IF_FALSE(avctx, res == 0, res, "Failed to create hardware device context (AMF) : %s\n", av_err2str(res));
331 
332  }
333  if(out_sw_format == AV_PIX_FMT_NONE){
334  if(outlink->format == AV_PIX_FMT_AMF_SURFACE)
335  out_sw_format = in_sw_format;
336  else
337  out_sw_format = outlink->format;
338  }
339  ctx->hwframes_out_ref = av_hwframe_ctx_alloc(ctx->amf_device_ref);
340  if (!ctx->hwframes_out_ref)
341  return AVERROR(ENOMEM);
342  hwframes_out = (AVHWFramesContext*)ctx->hwframes_out_ref->data;
343  hwdev_ctx = (AVHWDeviceContext*)ctx->amf_device_ref->data;
344  if (hwdev_ctx->type == AV_HWDEVICE_TYPE_AMF)
345  {
346  ctx->amf_device_ctx = hwdev_ctx->hwctx;
347  }
348  hwframes_out->format = AV_PIX_FMT_AMF_SURFACE;
349  hwframes_out->sw_format = out_sw_format;
350 
351  if (inlink->format == AV_PIX_FMT_AMF_SURFACE) {
352  *in_format = in_sw_format;
353  } else {
354  *in_format = inlink->format;
355  }
356  outlink->w = ctx->width;
357  outlink->h = ctx->height;
358 
359  if (ctx->reset_sar)
360  outlink->sample_aspect_ratio = (AVRational){1, 1};
361  else if (inlink->sample_aspect_ratio.num) {
362  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h * inlink->w, outlink->w * inlink->h}, inlink->sample_aspect_ratio);
363  } else
364  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
365 
366  hwframes_out->width = outlink->w;
367  hwframes_out->height = outlink->h;
368 
369  err = av_hwframe_ctx_init(ctx->hwframes_out_ref);
370  if (err < 0)
371  return err;
372 
373  outl->hw_frames_ctx = av_buffer_ref(ctx->hwframes_out_ref);
374  if (!outl->hw_frames_ctx) {
375  return AVERROR(ENOMEM);
376  }
377  return 0;
378 }
379 
380 void amf_free_amfsurface(void *opaque, uint8_t *data)
381 {
382  AMFSurface *surface = (AMFSurface*)data;
383  surface->pVtbl->Release(surface);
384 }
385 
386 AVFrame *amf_amfsurface_to_avframe(AVFilterContext *avctx, AMFSurface* pSurface)
387 {
389  AMFFilterContext *ctx = avctx->priv;
390 
391  if (!frame)
392  return NULL;
393 
394  if (ctx->hwframes_out_ref) {
395  AVHWFramesContext *hwframes_out = (AVHWFramesContext *)ctx->hwframes_out_ref->data;
396  if (hwframes_out->format == AV_PIX_FMT_AMF_SURFACE) {
397  int ret = av_hwframe_get_buffer(ctx->hwframes_out_ref, frame, 0);
398  if (ret < 0) {
399  av_log(avctx, AV_LOG_ERROR, "Get hw frame failed.\n");
401  return NULL;
402  }
403  frame->data[0] = (uint8_t *)pSurface;
404  frame->buf[1] = av_buffer_create((uint8_t *)pSurface, sizeof(AMFSurface),
406  (void*)avctx,
408  } else { // FIXME: add processing of other hw formats
409  av_log(ctx, AV_LOG_ERROR, "Unknown pixel format\n");
410  return NULL;
411  }
412  } else {
413 
414  switch (pSurface->pVtbl->GetMemoryType(pSurface))
415  {
416  #if CONFIG_D3D11VA
417  case AMF_MEMORY_DX11:
418  {
419  AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
420  frame->data[0] = plane0->pVtbl->GetNative(plane0);
421  frame->data[1] = (uint8_t*)(intptr_t)0;
422 
423  frame->buf[0] = av_buffer_create(NULL,
424  0,
426  pSurface,
428  }
429  break;
430  #endif
431  #if CONFIG_DXVA2
432  case AMF_MEMORY_DX9:
433  {
434  AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
435  frame->data[3] = plane0->pVtbl->GetNative(plane0);
436 
437  frame->buf[0] = av_buffer_create(NULL,
438  0,
440  pSurface,
442  }
443  break;
444  #endif
445  default:
446  {
447  av_log(avctx, AV_LOG_ERROR, "Unsupported memory type : %d\n", pSurface->pVtbl->GetMemoryType(pSurface));
448  return NULL;
449  }
450  }
451  }
452 
453 
454  return frame;
455 }
456 
457 int amf_avframe_to_amfsurface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface** ppSurface)
458 {
459  AMFVariantStruct var = { 0 };
460  AMFFilterContext *ctx = avctx->priv;
461  AMFBuffer *hdrmeta_buffer = NULL;
462  AMFSurface *surface;
463  AMF_RESULT res;
464  int hw_surface = 0;
465 
466  switch (frame->format) {
467 #if CONFIG_D3D11VA
468  case AV_PIX_FMT_D3D11:
469  {
470  static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
471  ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
472  int index = (intptr_t)frame->data[1]; // index is a slice in texture array is - set to tell AMF which slice to use
473  texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);
474 
475  res = ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX11Native(ctx->amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
476  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
477  hw_surface = 1;
478  }
479  break;
480 #endif
482  {
483  surface = (AMFSurface*)frame->data[0]; // actual surface
484  surface->pVtbl->Acquire(surface); // returned surface has to be to be ref++
485  hw_surface = 1;
486  }
487  break;
488 
489 #if CONFIG_DXVA2
491  {
492  IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture
493 
494  res = ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX9Native(ctx->amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
495  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
496  hw_surface = 1;
497  }
498  break;
499 #endif
500  default:
501  {
502  AMF_SURFACE_FORMAT amf_fmt = av_av_to_amf_format(frame->format);
503  res = ctx->amf_device_ctx->context->pVtbl->AllocSurface(ctx->amf_device_ctx->context, AMF_MEMORY_HOST, amf_fmt, frame->width, frame->height, &surface);
504  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
505  amf_copy_surface(avctx, frame, surface);
506  }
507  break;
508  }
509 
510  // If AMFSurface comes from other AMF components, it may have various
511  // properties already set. These properties can be used by other AMF
512  // components to perform their tasks. In the context of the AMF video
513  // filter, that other component could be an AMFVideoConverter. By default,
514  // AMFVideoConverter will use HDR related properties assigned to a surface
515  // by an AMFDecoder. If frames (surfaces) originated from any other source,
516  // i.e. from hevcdec, assign those properties from avframe; do not
517  // overwrite these properties if they already have a value.
518  res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_COLOR_TRANSFER_CHARACTERISTIC, &var);
519 
520  if (res == AMF_NOT_FOUND && frame->color_trc != AVCOL_TRC_UNSPECIFIED)
521  // Note: as of now(Feb 2026), most AV and AMF enums are interchangeable.
522  // TBD: can enums change their values in the future?
523  // For better future-proofing it's better to have dedicated
524  // enum mapping functions.
525  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_DECODER_COLOR_TRANSFER_CHARACTERISTIC, frame->color_trc);
526 
527  res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_COLOR_PRIMARIES, &var);
528  if (res == AMF_NOT_FOUND && frame->color_primaries != AVCOL_PRI_UNSPECIFIED)
529  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_DECODER_COLOR_PRIMARIES, frame->color_primaries);
530 
531  res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_COLOR_RANGE, &var);
532  if (res == AMF_NOT_FOUND && frame->color_range != AVCOL_RANGE_UNSPECIFIED)
533  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_DECODER_COLOR_RANGE, frame->color_range);
534 
535  // Color range for older drivers
536  if (frame->color_range == AVCOL_RANGE_JPEG) {
537  AMF_ASSIGN_PROPERTY_BOOL(res, surface, AMF_VIDEO_DECODER_FULL_RANGE_COLOR, 1);
538  } else if (frame->color_range != AVCOL_RANGE_UNSPECIFIED)
539  AMF_ASSIGN_PROPERTY_BOOL(res, surface, AMF_VIDEO_DECODER_FULL_RANGE_COLOR, 0);
540 
541  // Color profile for newer drivers
542  res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_COLOR_PROFILE, &var);
543  if (res == AMF_NOT_FOUND && frame->color_range != AVCOL_RANGE_UNSPECIFIED && frame->colorspace != AVCOL_SPC_UNSPECIFIED) {
544  amf_int64 color_profile = color_profile = av_amf_get_color_profile(frame->color_range, frame->colorspace);
545 
546  if (color_profile != AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN)
547  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_DECODER_COLOR_PROFILE, color_profile);
548  }
549 
550  if (ctx->in_trc == AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084 && (ctx->master_display || ctx->light_meta)) {
551  res = ctx->amf_device_ctx->context->pVtbl->AllocBuffer(ctx->amf_device_ctx->context, AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &hdrmeta_buffer);
552  if (res == AMF_OK) {
553  AMFHDRMetadata *hdrmeta = (AMFHDRMetadata*)hdrmeta_buffer->pVtbl->GetNative(hdrmeta_buffer);
554 
555  av_amf_display_mastering_meta_to_hdrmeta(ctx->master_display, hdrmeta);
556  av_amf_light_metadata_to_hdrmeta(ctx->light_meta, hdrmeta);
557  AMF_ASSIGN_PROPERTY_INTERFACE(res, surface, AMF_VIDEO_DECODER_HDR_METADATA, hdrmeta_buffer);
558  }
559  } else if (frame->color_trc == AVCOL_TRC_SMPTE2084) {
560  res = surface->pVtbl->GetProperty(surface, AMF_VIDEO_DECODER_HDR_METADATA, &var);
561  if (res == AMF_NOT_FOUND) {
562  res = ctx->amf_device_ctx->context->pVtbl->AllocBuffer(ctx->amf_device_ctx->context, AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &hdrmeta_buffer);
563  if (res == AMF_OK) {
564  AMFHDRMetadata *hdrmeta = (AMFHDRMetadata*)hdrmeta_buffer->pVtbl->GetNative(hdrmeta_buffer);
565 
566  if (av_amf_extract_hdr_metadata(frame, hdrmeta) == 0)
567  AMF_ASSIGN_PROPERTY_INTERFACE(res, surface, AMF_VIDEO_DECODER_HDR_METADATA, hdrmeta_buffer);
568  }
569  }
570  }
571 
572  if (hdrmeta_buffer) {
573  hdrmeta_buffer->pVtbl->Release(hdrmeta_buffer);
574  hdrmeta_buffer = NULL;
575  }
576 
577  if (frame->crop_left || frame->crop_right || frame->crop_top || frame->crop_bottom) {
578  size_t crop_x = frame->crop_left;
579  size_t crop_y = frame->crop_top;
580  size_t crop_w = frame->width - (frame->crop_left + frame->crop_right);
581  size_t crop_h = frame->height - (frame->crop_top + frame->crop_bottom);
582  AVFilterLink *outlink = avctx->outputs[0];
583  if (crop_x || crop_y) {
584  if (crop_w == outlink->w && crop_h == outlink->h) {
585  AMFData *cropped_buffer = NULL;
586  res = surface->pVtbl->Duplicate(surface, surface->pVtbl->GetMemoryType(surface), &cropped_buffer);
587  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "Duplicate() failed with error %d\n", res);
588  surface->pVtbl->Release(surface);
589  surface = (AMFSurface*)cropped_buffer;
590  }
591  else
592  surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
593  }
594  else
595  surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
596  }
597  else if (hw_surface) {
598  // input HW surfaces can be vertically aligned by 16; tell AMF the real size
599  surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
600  }
601 
602  surface->pVtbl->SetPts(surface, frame->pts);
603  *ppSurface = surface;
604  return 0;
605 }
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:88
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
amf_avframe_to_amfsurface
int amf_avframe_to_amfsurface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface **ppSurface)
Definition: vf_amf_common.c:457
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
out
static FILE * out
Definition: movenc.c:55
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1067
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:200
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:64
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:337
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:434
AVCOL_RANGE_JPEG
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:777
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:263
AVCOL_TRC_UNSPECIFIED
@ AVCOL_TRC_UNSPECIFIED
Definition: pixfmt.h:669
ff_make_pixel_format_list
av_warn_unused_result AVFilterFormats * ff_make_pixel_format_list(const enum AVPixelFormat *fmts)
Create a list of supported pixel formats.
data
const char data[16]
Definition: mxf.c:149
AVCOL_SPC_RGB
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
Definition: pixfmt.h:701
ff_scale_eval_dimensions
int ff_scale_eval_dimensions(void *log_ctx, const char *w_expr, const char *h_expr, AVFilterLink *inlink, AVFilterLink *outlink, int *ret_w, int *ret_h)
Parse and evaluate string expressions for width and height.
Definition: scale_eval.c:57
AVFilterContext::hw_device_ctx
AVBufferRef * hw_device_ctx
For filters which will create hardware frames, sets the device the filter should create them in.
Definition: avfilter.h:356
amf_setup_input_output_formats
int amf_setup_input_output_formats(AVFilterContext *avctx, const enum AVPixelFormat *input_pix_fmts, const enum AVPixelFormat *output_pix_fmts)
Definition: vf_amf_common.c:180
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AMF_RETURN_IF_FALSE
#define AMF_RETURN_IF_FALSE(avctx, exp, ret_value,...)
Error handling helper.
Definition: amfenc.h:169
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:220
AV_PIX_FMT_AMF_SURFACE
@ AV_PIX_FMT_AMF_SURFACE
HW acceleration through AMF.
Definition: pixfmt.h:477
av_amf_display_mastering_meta_to_hdrmeta
int av_amf_display_mastering_meta_to_hdrmeta(const AVMasteringDisplayMetadata *display_meta, AMFHDRMetadata *hdrmeta)
Definition: hwcontext_amf.c:186
AVFilterFormats
A list of supported formats for one end of a filter link.
Definition: formats.h:64
formats.h
amf_free_amfsurface
void amf_free_amfsurface(void *opaque, uint8_t *data)
Definition: vf_amf_common.c:380
AVFilterContext::priv
void * priv
private data for use by the filter
Definition: avfilter.h:289
fail
#define fail()
Definition: checkasm.h:224
AV_HWDEVICE_TYPE_D3D11VA
@ AV_HWDEVICE_TYPE_D3D11VA
Definition: hwcontext.h:35
av_av_to_amf_format
enum AMF_SURFACE_FORMAT av_av_to_amf_format(enum AVPixelFormat fmt)
Definition: hwcontext_amf.c:133
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:63
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:52
avassert.h
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:210
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
av_amf_get_color_profile
enum AMF_VIDEO_CONVERTER_COLOR_PROFILE_ENUM av_amf_get_color_profile(enum AVColorRange color_range, enum AVColorSpace color_space)
Definition: hwcontext_amf.c:155
AVHWFramesContext::height
int height
Definition: hwcontext.h:220
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:134
AV_BUFFER_FLAG_READONLY
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
Definition: buffer.h:114
ff_filter_link
static FilterLink * ff_filter_link(AVFilterLink *link)
Definition: filters.h:199
AVCOL_SPC_SMPTE170M
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
Definition: pixfmt.h:707
ff_formats_ref
int ff_formats_ref(AVFilterFormats *f, AVFilterFormats **ref)
Add *ref as a new reference to formats.
Definition: formats.c:755
vf_amf_common.h
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:42
AV_HWDEVICE_TYPE_AMF
@ AV_HWDEVICE_TYPE_AMF
Definition: hwcontext.h:41
AMFFilterContext
Definition: vf_amf_common.h:29
ctx
static AVFormatContext * ctx
Definition: movenc.c:49
AMF_GOTO_FAIL_IF_FALSE
#define AMF_GOTO_FAIL_IF_FALSE(avctx, exp, ret_value,...)
Definition: hwcontext_amf_internal.h:34
hwcontext_amf.h
av_hwdevice_get_type_name
const char * av_hwdevice_get_type_name(enum AVHWDeviceType type)
Get the string name of an AVHWDeviceType.
Definition: hwcontext.c:120
AVCOL_PRI_UNSPECIFIED
@ AVCOL_PRI_UNSPECIFIED
Definition: pixfmt.h:639
if
if(ret)
Definition: filter_design.txt:179
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:213
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:599
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AV_HWDEVICE_TYPE_DXVA2
@ AV_HWDEVICE_TYPE_DXVA2
Definition: hwcontext.h:32
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVHWFramesContext::device_ref
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:129
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:282
double
double
Definition: af_crystalizer.c:132
avfilter_internal.h
AVCOL_RANGE_UNSPECIFIED
@ AVCOL_RANGE_UNSPECIFIED
Definition: pixfmt.h:743
index
int index
Definition: gxfenc.c:90
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
AVCOL_TRC_SMPTE2084
@ AVCOL_TRC_SMPTE2084
SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems.
Definition: pixfmt.h:683
i
#define i(width, name, range_min, range_max)
Definition: cbs_h264.c:63
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:122
av_amf_extract_hdr_metadata
int av_amf_extract_hdr_metadata(const AVFrame *frame, AMFHDRMetadata *hdrmeta)
Definition: hwcontext_amf.c:233
scale_eval.h
amf_copy_surface
int amf_copy_surface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface *surface)
Definition: vf_amf_common.c:244
planes
static const struct @585 planes[]
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
amf_filter_filter_frame
int amf_filter_filter_frame(AVFilterLink *inlink, AVFrame *in)
Definition: vf_amf_common.c:80
amf_filter_uninit
void amf_filter_uninit(AVFilterContext *avctx)
Definition: vf_amf_common.c:58
av_hwdevice_ctx_create_derived
int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr, enum AVHWDeviceType type, AVBufferRef *src_ref, int flags)
Create a new device of the specified type from an existing device.
Definition: hwcontext.c:718
AVCOL_SPC_BT2020_NCL
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:711
AVColorSpace
AVColorSpace
YUV colorspace type.
Definition: pixfmt.h:700
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:496
AVCOL_SPC_UNSPECIFIED
@ AVCOL_SPC_UNSPECIFIED
Definition: pixfmt.h:703
amf_filter_init
int amf_filter_init(AVFilterContext *avctx)
Definition: vf_amf_common.c:41
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:760
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:118
ret
ret
Definition: filter_design.txt:187
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:75
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:265
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:615
av_get_pix_fmt
enum AVPixelFormat av_get_pix_fmt(const char *name)
Return the pixel format corresponding to name.
Definition: pixdesc.c:3388
hwcontext_amf_internal.h
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
avfilter.h
amf_init_filter_config
int amf_init_filter_config(AVFilterLink *outlink, enum AVPixelFormat *in_format)
Definition: vf_amf_common.c:268
av_mul_q
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
Definition: rational.c:80
AVFilterContext
An instance of a filter.
Definition: avfilter.h:274
mem.h
AVFilterFormatsConfig::formats
AVFilterFormats * formats
List of supported formats (pixel or sample).
Definition: avfilter.h:126
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
amf_amfsurface_to_avframe
AVFrame * amf_amfsurface_to_avframe(AVFilterContext *avctx, AMFSurface *pSurface)
Definition: vf_amf_common.c:386
imgutils.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
av_amf_light_metadata_to_hdrmeta
int av_amf_light_metadata_to_hdrmeta(const AVContentLightMetadata *light_meta, AMFHDRMetadata *hdrmeta)
Definition: hwcontext_amf.c:222
av_image_copy
void av_image_copy(uint8_t *const dst_data[4], const int dst_linesizes[4], const uint8_t *const src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Copy image in src_data to dst_data.
Definition: imgutils.c:422
AVCOL_SPC_BT709
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
Definition: pixfmt.h:702
AVColorRange
AVColorRange
Visual content value range.
Definition: pixfmt.h:742
av_hwframe_get_buffer
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
Definition: hwcontext.c:506
ff_scale_adjust_dimensions
int ff_scale_adjust_dimensions(AVFilterLink *inlink, int *ret_w, int *ret_h, int force_original_aspect_ratio, int force_divisible_by, double w_adj)
Transform evaluated width and height obtained from ff_scale_eval_dimensions into actual target width ...
Definition: scale_eval.c:122
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:3376
AVFilterContext::outputs
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:286