FFmpeg
vf_scale_d3d12.c
Go to the documentation of this file.
1 /**
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #define COBJMACROS
20 
21 #include "libavutil/opt.h"
22 #include "libavutil/pixdesc.h"
23 #include "compat/w32dlfcn.h"
24 
25 #include "libavutil/hwcontext.h"
28 
29 #include "filters.h"
30 #include "scale_eval.h"
31 #include "video.h"
32 
/**
 * Per-filter-instance state for the D3D12 scaling filter.
 * NOTE(review): several fields (e.g. format, hw_device_ctx, hw_frames_ctx_out,
 * input_width/input_height, input_framerate) are referenced by the functions
 * below but their declarations are not visible in this view of the file.
 */
typedef struct ScaleD3D12Context {
    const AVClass *classCtx;   ///< AVClass for AVOption handling (must be first)
    char *w_expr;              ///< output width expression string ("w" option)
    char *h_expr;              ///< output height expression string ("h" option)

    /* D3D12 objects */
    ID3D12Device *device;      ///< borrowed from the hw device context (never released here)
    ID3D12VideoDevice *video_device;
    ID3D12VideoProcessor *video_processor;
    ID3D12CommandQueue *command_queue;          ///< VIDEO_PROCESS-type queue
    ID3D12VideoProcessCommandList *command_list;
    ID3D12CommandAllocator *command_allocator;

    /* Synchronization */
    ID3D12Fence *fence;        ///< signalled once per processed frame
    UINT64 fence_value;        ///< next value to signal (monotonically increasing, starts at 1)
    HANDLE fence_event;        ///< Win32 event used to block on fence completion at teardown

    /* Buffer references */

    /* Dimensions and formats */
    int width, height;         ///< output dimensions after expression evaluation/alignment
    DXGI_FORMAT input_format;  ///< DXGI format of input surfaces (NV12/P010)
    DXGI_FORMAT output_format; ///< DXGI format of output surfaces (NV12/P010)

    /* Color space and frame rate */
    DXGI_COLOR_SPACE_TYPE input_colorspace; ///< also used for the output (no conversion)

    /* Video processor capabilities */
    D3D12_FEATURE_DATA_VIDEO_PROCESS_SUPPORT process_support;
69 
71  return 0;
72 }
73 
    UINT64 fence_value;
    HRESULT hr;
    /* Wait for all GPU operations to complete before releasing resources.
     * s->fence_value is the NEXT value to signal, so the last value actually
     * used is fence_value - 1; signal it again and wait until the GPU has
     * reached it before tearing anything down. */
    if (s->command_queue && s->fence && s->fence_event) {
        fence_value = s->fence_value - 1;
        hr = ID3D12CommandQueue_Signal(s->command_queue, s->fence, fence_value);
        if (SUCCEEDED(hr)) {
            UINT64 completed = ID3D12Fence_GetCompletedValue(s->fence);
            if (completed < fence_value) {
                hr = ID3D12Fence_SetEventOnCompletion(s->fence, fence_value, s->fence_event);
                if (SUCCEEDED(hr)) {
                    WaitForSingleObject(s->fence_event, INFINITE);
                }
            }
        }
    }

    /* Release everything and NULL each pointer so the function is idempotent.
     * s->device is deliberately not released: it is owned by the hw device
     * context, not by this filter. */
    if (s->fence_event) {
        CloseHandle(s->fence_event);
        s->fence_event = NULL;
    }

    if (s->fence) {
        ID3D12Fence_Release(s->fence);
        s->fence = NULL;
    }

    if (s->command_list) {
        ID3D12VideoProcessCommandList_Release(s->command_list);
        s->command_list = NULL;
    }

    if (s->command_allocator) {
        ID3D12CommandAllocator_Release(s->command_allocator);
        s->command_allocator = NULL;
    }

    if (s->video_processor) {
        ID3D12VideoProcessor_Release(s->video_processor);
        s->video_processor = NULL;
    }

    if (s->video_device) {
        ID3D12VideoDevice_Release(s->video_device);
        s->video_device = NULL;
    }

    if (s->command_queue) {
        ID3D12CommandQueue_Release(s->command_queue);
        s->command_queue = NULL;
    }
}
127 
128 static DXGI_COLOR_SPACE_TYPE get_dxgi_colorspace(enum AVColorSpace colorspace, enum AVColorTransferCharacteristic trc, int is_10bit)
129 {
130  /* Map FFmpeg color space to DXGI color space */
131  if (is_10bit) {
132  /* 10-bit formats (P010) */
133  if (colorspace == AVCOL_SPC_BT2020_NCL || colorspace == AVCOL_SPC_BT2020_CL) {
134  if (trc == AVCOL_TRC_SMPTE2084) {
135  return DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020; ///< HDR10
136  } else if (trc == AVCOL_TRC_ARIB_STD_B67) {
137  return DXGI_COLOR_SPACE_YCBCR_STUDIO_GHLG_TOPLEFT_P2020; ///< HLG
138  } else {
139  return DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P2020;
140  }
141  } else {
142  return DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709; ///< Rec.709 10-bit
143  }
144  } else {
145  /* 8-bit formats (NV12) */
146  if (colorspace == AVCOL_SPC_BT2020_NCL || colorspace == AVCOL_SPC_BT2020_CL) {
147  return DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P2020;
148  } else if (colorspace == AVCOL_SPC_BT470BG || colorspace == AVCOL_SPC_SMPTE170M) {
149  return DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P601;
150  } else {
151  return DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709; ///< Default to Rec.709
152  }
153  }
154 }
155 
{
    /* Best-effort estimate of the input frame rate, used to configure the
     * D3D12 video processor stream descriptors. */
    AVRational framerate = {0, 0};

    if (in->duration > 0 && inlink->time_base.num > 0 && inlink->time_base.den > 0) {
        /*
         * Calculate framerate from frame duration and timebase
         * framerate = 1 / (duration * timebase)
         */
        av_reduce(&framerate.num, &framerate.den,
                  inlink->time_base.den, in->duration * inlink->time_base.num,
                  INT_MAX);
    } else if (inlink->time_base.num > 0 && inlink->time_base.den > 0) {
        /* Estimate from timebase (inverse of timebase is often the framerate) */
        framerate.num = inlink->time_base.den;
        framerate.den = inlink->time_base.num;
    } else {
        /* Default to 30fps if framerate cannot be determined */
        framerate.num = 30;
        framerate.den = 1;
        av_log(ctx, AV_LOG_WARNING, "Input framerate not determinable, defaulting to 30fps\n");
    }

    return framerate;
}
181 
    HRESULT hr;

    if (s->output_format == DXGI_FORMAT_UNKNOWN) {
        av_log(ctx, AV_LOG_ERROR, "Output format not initialized\n");
        return AVERROR(EINVAL);
    }

    /* Pull the ID3D12Device out of the shared hw device context; the device
     * is borrowed, not owned, so it is never released by this filter. */
    AVHWDeviceContext *hwctx = (AVHWDeviceContext *)s->hw_device_ctx->data;
    AVD3D12VADeviceContext *d3d12_hwctx = (AVD3D12VADeviceContext *)hwctx->hwctx;
    s->device = d3d12_hwctx->device;

    av_log(ctx, AV_LOG_VERBOSE, "Configuring D3D12 video processor: %dx%d -> %dx%d\n",
           s->input_width, s->input_height, s->width, s->height);

    /* QueryInterface for the video-specific device interface. */
    hr = ID3D12Device_QueryInterface(s->device, &IID_ID3D12VideoDevice, (void **)&s->video_device);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to get D3D12 video device interface: HRESULT 0x%lX\n", hr);
        return AVERROR_EXTERNAL;
    }

    /* Dedicated VIDEO_PROCESS command queue for the processing work. */
    D3D12_COMMAND_QUEUE_DESC queue_desc = {
        .Type = D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS,
        .Priority = D3D12_COMMAND_QUEUE_PRIORITY_NORMAL,
        .Flags = D3D12_COMMAND_QUEUE_FLAG_NONE,
        .NodeMask = 0
    };

    hr = ID3D12Device_CreateCommandQueue(s->device, &queue_desc, &IID_ID3D12CommandQueue, (void **)&s->command_queue);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create command queue: HRESULT 0x%lX\n", hr);
        return AVERROR_EXTERNAL;
    }

    /* Fill the feature-support query describing the intended conversion.
     * Note the output reuses the input color space: this filter only scales,
     * it does not perform color-space conversion. */
    s->process_support.NodeIndex = 0;

    s->process_support.InputSample.Format.Format = s->input_format;
    s->process_support.InputSample.Format.ColorSpace = s->input_colorspace;
    s->process_support.InputSample.Width = s->input_width;
    s->process_support.InputSample.Height = s->input_height;
    s->process_support.InputFrameRate.Numerator = s->input_framerate.num;
    s->process_support.InputFrameRate.Denominator = s->input_framerate.den;
    s->process_support.InputFieldType = D3D12_VIDEO_FIELD_TYPE_NONE;
    s->process_support.InputStereoFormat = D3D12_VIDEO_FRAME_STEREO_FORMAT_NONE;

    s->process_support.OutputFormat.Format = s->output_format;
    s->process_support.OutputFormat.ColorSpace = s->input_colorspace;
    s->process_support.OutputFrameRate.Numerator = s->input_framerate.num;
    s->process_support.OutputFrameRate.Denominator = s->input_framerate.den;
    s->process_support.OutputStereoFormat = D3D12_VIDEO_FRAME_STEREO_FORMAT_NONE;

    hr = ID3D12VideoDevice_CheckFeatureSupport(
        s->video_device,
        D3D12_FEATURE_VIDEO_PROCESS_SUPPORT,
        &s->process_support,
        sizeof(s->process_support)
    );

    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Video process feature not supported: HRESULT 0x%lX\n", hr);
        return AVERROR_EXTERNAL;
    }

    if (!(s->process_support.SupportFlags & D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED)) {
        av_log(ctx, AV_LOG_ERROR, "Video process configuration not supported by hardware\n");
        return AVERROR_EXTERNAL;
    }

    /* Output stream: single output, opaque alpha, black background. */
    D3D12_VIDEO_PROCESS_OUTPUT_STREAM_DESC processor_output_desc = {
        .Format = s->output_format,
        .ColorSpace = s->input_colorspace,
        .AlphaFillMode = D3D12_VIDEO_PROCESS_ALPHA_FILL_MODE_OPAQUE,
        .AlphaFillModeSourceStreamIndex = 0,
        .BackgroundColor = { 0.0f, 0.0f, 0.0f, 1.0f },
        .FrameRate = { s->input_framerate.num, s->input_framerate.den },
        .EnableStereo = FALSE,
    };

    /* Input stream: progressive, no deinterlacing/orientation/filters; the
     * size ranges are pinned to the exact input/output dimensions since this
     * processor is recreated whenever the geometry changes. */
    D3D12_VIDEO_PROCESS_INPUT_STREAM_DESC processor_input_desc = {
        .Format = s->input_format,
        .ColorSpace = s->input_colorspace,
        .SourceAspectRatio = { s->input_width, s->input_height },
        .DestinationAspectRatio = { s->width, s->height },
        .FrameRate = { s->input_framerate.num, s->input_framerate.den },
        .StereoFormat = D3D12_VIDEO_FRAME_STEREO_FORMAT_NONE,
        .FieldType = D3D12_VIDEO_FIELD_TYPE_NONE,
        .DeinterlaceMode = D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_NONE,
        .EnableOrientation = FALSE,
        .FilterFlags = D3D12_VIDEO_PROCESS_FILTER_FLAG_NONE,
        .SourceSizeRange = {
            .MaxWidth = s->input_width,
            .MaxHeight = s->input_height,
            .MinWidth = s->input_width,
            .MinHeight = s->input_height
        },
        .DestinationSizeRange = {
            .MaxWidth = s->width,
            .MaxHeight = s->height,
            .MinWidth = s->width,
            .MinHeight = s->height
        },
        .EnableAlphaBlending = FALSE,
        .LumaKey = { .Enable = FALSE, .Lower = 0.0f, .Upper = 1.0f },
        .NumPastFrames = 0,
        .NumFutureFrames = 0,
        .EnableAutoProcessing = FALSE,
    };

    /* If pixel aspect ratio adjustment is not supported, set to 1:1 and warn */
    if (!(s->process_support.FeatureSupport & D3D12_VIDEO_PROCESS_FEATURE_FLAG_PIXEL_ASPECT_RATIO)) {
        processor_input_desc.SourceAspectRatio.Numerator = 1;
        processor_input_desc.SourceAspectRatio.Denominator = 1;
        processor_input_desc.DestinationAspectRatio.Numerator = 1;
        processor_input_desc.DestinationAspectRatio.Denominator = 1;
        av_log(ctx, AV_LOG_WARNING, "Pixel aspect ratio adjustment not supported by hardware\n");
    }

    hr = ID3D12VideoDevice_CreateVideoProcessor(
        s->video_device,
        0,
        &processor_output_desc,
        1,
        &processor_input_desc,
        &IID_ID3D12VideoProcessor,
        (void **)&s->video_processor
    );

    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create video processor: HRESULT 0x%lX\n", hr);
        return AVERROR_EXTERNAL;
    }

    hr = ID3D12Device_CreateCommandAllocator(
        s->device,
        D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS,
        &IID_ID3D12CommandAllocator,
        (void **)&s->command_allocator
    );

    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create command allocator: HRESULT 0x%lX\n", hr);
        return AVERROR_EXTERNAL;
    }

    hr = ID3D12Device_CreateCommandList(
        s->device,
        0,
        D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS,
        s->command_allocator,
        NULL,
        &IID_ID3D12VideoProcessCommandList,
        (void **)&s->command_list
    );

    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create command list: HRESULT 0x%lX\n", hr);
        return AVERROR_EXTERNAL;
    }

    /* Command lists are created in the recording state; close immediately so
     * the per-frame path can Reset() it uniformly before each use. */
    ID3D12VideoProcessCommandList_Close(s->command_list);

    hr = ID3D12Device_CreateFence(s->device, 0, D3D12_FENCE_FLAG_NONE, &IID_ID3D12Fence, (void **)&s->fence);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create fence: HRESULT 0x%lX\n", hr);
        return AVERROR_EXTERNAL;
    }

    /* Fence starts at 0; first frame signals value 1. Auto-reset event used
     * only when draining at teardown. */
    s->fence_value = 1;
    s->fence_event = CreateEvent(NULL, FALSE, FALSE, NULL);
    if (!s->fence_event) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create fence event\n");
        return AVERROR_EXTERNAL;
    }

    av_log(ctx, AV_LOG_VERBOSE, "D3D12 video processor successfully configured\n");
    return 0;
}
359 
{
    AVFilterContext *ctx = inlink->dst;
    ScaleD3D12Context *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];
    AVFrame *out = NULL;
    int ret = 0;
    HRESULT hr;

    if (!in) {
        av_log(ctx, AV_LOG_ERROR, "Null input frame\n");
        return AVERROR(EINVAL);
    }

    if (!in->hw_frames_ctx) {
        av_log(ctx, AV_LOG_ERROR, "No hardware frames context in input frame\n");
        av_frame_free(&in);
        return AVERROR(EINVAL);
    }

    /* NOTE(review): 'frames_ctx' (the input AVHWFramesContext) is declared
     * here in the original file; the declaration is not visible in this view. */

    if (!s->hw_device_ctx) {
        av_log(ctx, AV_LOG_ERROR, "Filter hardware device context is uninitialized\n");
        av_frame_free(&in);
        return AVERROR(EINVAL);
    }

    /* The frame must come from the same device type the filter was set up on. */
    AVHWDeviceContext *input_device_ctx = (AVHWDeviceContext *)frames_ctx->device_ref->data;
    AVHWDeviceContext *filter_device_ctx = (AVHWDeviceContext *)s->hw_device_ctx->data;

    if (input_device_ctx->type != filter_device_ctx->type) {
        av_log(ctx, AV_LOG_ERROR, "Mismatch between input and filter hardware device types\n");
        av_frame_free(&in);
        return AVERROR(EINVAL);
    }

    out = av_frame_alloc();
    if (!out) {
        av_log(ctx, AV_LOG_ERROR, "Failed to allocate output frame\n");
        av_frame_free(&in);
        return AVERROR(ENOMEM);
    }

    /* Grab an output surface from the pool created in config_props(). */
    ret = av_hwframe_get_buffer(s->hw_frames_ctx_out, out, 0);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Failed to get output frame from pool\n");
        goto fail;
    }

    /* Lazy one-time processor setup: the input format/colorspace/framerate
     * are only known once the first frame arrives. */
    if (!s->video_processor) {
        AVHWFramesContext *input_frames_ctx = (AVHWFramesContext *)in->hw_frames_ctx->data;

        s->input_width = input_frames_ctx->width;
        s->input_height = input_frames_ctx->height;

        AVD3D12VAFramesContext *input_hwctx = (AVD3D12VAFramesContext *)input_frames_ctx->hwctx;
        s->input_format = input_hwctx->format;

        /* Fall back to deriving the DXGI format from the sw pixel format. */
        if (s->input_format == DXGI_FORMAT_UNKNOWN) {
            switch (input_frames_ctx->sw_format) {
            case AV_PIX_FMT_NV12:
                s->input_format = DXGI_FORMAT_NV12;
                break;
            case AV_PIX_FMT_P010:
                s->input_format = DXGI_FORMAT_P010;
                break;
            default:
                av_log(ctx, AV_LOG_ERROR, "Unsupported input format\n");
                ret = AVERROR(EINVAL);
                goto fail;
            }
        }

        int is_10bit = (s->input_format == DXGI_FORMAT_P010);
        s->input_colorspace = get_dxgi_colorspace(in->colorspace, in->color_trc, is_10bit);

        s->input_framerate = get_input_framerate(ctx, inlink, in);

        av_log(ctx, AV_LOG_VERBOSE, "Input format: %dx%d, DXGI format: %d, colorspace: %d, framerate: %d/%d\n",
               s->input_width, s->input_height, s->input_format, s->input_colorspace,
               s->input_framerate.num, s->input_framerate.den);

        /* NOTE(review): the call assigning 'ret' (processor configuration)
         * precedes this check in the original file; not visible in this view. */
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Failed to configure processor\n");
            goto fail;
        }
    }

    /* data[0] of an AV_PIX_FMT_D3D12 frame is an AVD3D12VAFrame.
     * NOTE(review): 'output_frame' is declared analogously from 'out' in the
     * original file; the declaration is not visible in this view. */
    AVD3D12VAFrame *input_frame = (AVD3D12VAFrame *)in->data[0];

    if (!input_frame || !output_frame) {
        av_log(ctx, AV_LOG_ERROR, "Invalid frame pointers\n");
        ret = AVERROR(EINVAL);
        goto fail;
    }

    ID3D12Resource *input_resource = input_frame->texture;
    ID3D12Resource *output_resource = output_frame->texture;

    if (!input_resource || !output_resource) {
        av_log(ctx, AV_LOG_ERROR, "Invalid D3D12 resources in frames\n");
        ret = AVERROR(EINVAL);
        goto fail;
    }

    /* Wait for input frame's fence before accessing it */
    if (input_frame->sync_ctx.fence && input_frame->sync_ctx.fence_value > 0) {
        UINT64 completed = ID3D12Fence_GetCompletedValue(input_frame->sync_ctx.fence);
        if (completed < input_frame->sync_ctx.fence_value) {
            /* GPU-side wait: queue stalls until the producer's fence passes. */
            hr = ID3D12CommandQueue_Wait(s->command_queue, input_frame->sync_ctx.fence, input_frame->sync_ctx.fence_value);
            if (FAILED(hr)) {
                av_log(ctx, AV_LOG_ERROR, "Failed to wait for input fence: HRESULT 0x%lX\n", hr);
                goto fail;
            }
        }
    }

    hr = ID3D12CommandAllocator_Reset(s->command_allocator);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to reset command allocator: HRESULT 0x%lX\n", hr);
        goto fail;
    }

    hr = ID3D12VideoProcessCommandList_Reset(s->command_list, s->command_allocator);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to reset command list: HRESULT 0x%lX\n", hr);
        goto fail;
    }

    /* Transition both textures from COMMON into the video-process states. */
    D3D12_RESOURCE_BARRIER barriers[2] = {
        {
            .Type = D3D12_RESOURCE_BARRIER_TYPE_TRANSITION,
            .Flags = D3D12_RESOURCE_BARRIER_FLAG_NONE,
            .Transition = {
                .pResource = input_resource,
                .Subresource = D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
                .StateBefore = D3D12_RESOURCE_STATE_COMMON,
                .StateAfter = D3D12_RESOURCE_STATE_VIDEO_PROCESS_READ
            }
        },
        {
            .Type = D3D12_RESOURCE_BARRIER_TYPE_TRANSITION,
            .Flags = D3D12_RESOURCE_BARRIER_FLAG_NONE,
            .Transition = {
                .pResource = output_resource,
                .Subresource = D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
                .StateBefore = D3D12_RESOURCE_STATE_COMMON,
                .StateAfter = D3D12_RESOURCE_STATE_VIDEO_PROCESS_WRITE
            }
        }
    };

    ID3D12VideoProcessCommandList_ResourceBarrier(s->command_list, 2, barriers);

    /* Single input stream: full source rect scaled to the full dest rect. */
    D3D12_VIDEO_PROCESS_INPUT_STREAM_ARGUMENTS input_args = {0};

    input_args.InputStream[0].pTexture2D = input_resource;
    input_args.Transform.SourceRectangle.right = s->input_width;
    input_args.Transform.SourceRectangle.bottom = s->input_height;
    input_args.Transform.DestinationRectangle.right = s->width;
    input_args.Transform.DestinationRectangle.bottom = s->height;
    input_args.Transform.Orientation = D3D12_VIDEO_PROCESS_ORIENTATION_DEFAULT;

    input_args.Flags = D3D12_VIDEO_PROCESS_INPUT_STREAM_FLAG_NONE;

    input_args.RateInfo.OutputIndex = 0;
    input_args.RateInfo.InputFrameOrField = 0;

    memset(input_args.FilterLevels, 0, sizeof(input_args.FilterLevels));

    input_args.AlphaBlending.Enable = FALSE;
    input_args.AlphaBlending.Alpha = 1.0f;

    D3D12_VIDEO_PROCESS_OUTPUT_STREAM_ARGUMENTS output_args = {0};

    output_args.OutputStream[0].pTexture2D = output_resource;
    output_args.TargetRectangle.right = s->width;
    output_args.TargetRectangle.bottom = s->height;

    ID3D12VideoProcessCommandList_ProcessFrames(
        s->command_list,
        s->video_processor,
        &output_args,
        1,
        &input_args
    );

    /* Transition both textures back to COMMON by swapping before/after. */
    for (int i = 0; i < 2; i++) {
        FFSWAP(D3D12_RESOURCE_STATES, barriers[i].Transition.StateBefore, barriers[i].Transition.StateAfter);
    }
    ID3D12VideoProcessCommandList_ResourceBarrier(s->command_list, 2, barriers);

    hr = ID3D12VideoProcessCommandList_Close(s->command_list);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to close command list: HRESULT 0x%lX\n", hr);
        goto fail;
    }

    ID3D12CommandList *cmd_lists[] = { (ID3D12CommandList *)s->command_list };
    ID3D12CommandQueue_ExecuteCommandLists(s->command_queue, 1, cmd_lists);

    hr = ID3D12CommandQueue_Signal(s->command_queue, s->fence, s->fence_value);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to signal fence: HRESULT 0x%lX\n", hr);
        goto fail;
    }

    /* Publish the fence/value on the output frame so downstream consumers
     * can synchronize with this processing work. */
    output_frame->sync_ctx.fence = s->fence;
    output_frame->sync_ctx.fence_value = s->fence_value;
    ID3D12Fence_AddRef(s->fence); ///< Increment reference count

    s->fence_value++;

    ret = av_frame_copy_props(out, in);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Failed to copy frame properties\n");
        goto fail;
    }

    out->width = s->width;
    out->height = s->height;
    out->format = AV_PIX_FMT_D3D12;

    av_frame_free(&in);

    return ff_filter_frame(outlink, out);

fail:
    av_frame_free(&in);
    av_frame_free(&out);
    return ret;
}
600 
{
    AVFilterContext *ctx = outlink->src;
    ScaleD3D12Context *s = ctx->priv;
    AVFilterLink *inlink = ctx->inputs[0];
    FilterLink *outl = ff_filter_link(outlink);
    int ret;

    /* NOTE(review): 'inl' (FilterLink of the input) is declared alongside
     * 'outl' in the original file; the declaration is not visible here. */

    /* Drop any state from a previous configuration (reconfiguration path). */
    av_buffer_unref(&s->hw_frames_ctx_out);
    av_buffer_unref(&s->hw_device_ctx);

    /* Evaluate the "w"/"h" option expressions against the input link. */
    ret = ff_scale_eval_dimensions(s, s->w_expr, s->h_expr, inlink, outlink, &s->width, &s->height);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Failed to evaluate dimensions\n");
        return ret;
    }

    /* Adjust dimensions to meet codec/hardware alignment requirements */
    ff_scale_adjust_dimensions(inlink, &s->width, &s->height, 0, 1, 1.f);

    outlink->w = s->width;
    outlink->h = s->height;

    if (!inl->hw_frames_ctx) {
        av_log(ctx, AV_LOG_ERROR, "No hw_frames_ctx available on input link\n");
        return AVERROR(EINVAL);
    }

    /* Adopt the input's device context as the filter's device. */
    if (!s->hw_device_ctx) {
        AVHWFramesContext *in_frames_ctx = (AVHWFramesContext *)inl->hw_frames_ctx->data;
        s->hw_device_ctx = av_buffer_ref(in_frames_ctx->device_ref);
        if (!s->hw_device_ctx) {
            av_log(ctx, AV_LOG_ERROR, "Failed to initialize filter hardware device context\n");
            return AVERROR(ENOMEM);
        }
    }

    AVHWDeviceContext *hwctx = (AVHWDeviceContext *)s->hw_device_ctx->data;
    AVD3D12VADeviceContext *d3d12_hwctx = (AVD3D12VADeviceContext *)hwctx->hwctx;

    s->device = d3d12_hwctx->device;

    if (!s->device) {
        av_log(ctx, AV_LOG_ERROR, "Failed to get valid D3D12 device\n");
        return AVERROR(EINVAL);
    }

    /* Build the output frame pool at the target dimensions/format. */
    s->hw_frames_ctx_out = av_hwframe_ctx_alloc(s->hw_device_ctx);
    if (!s->hw_frames_ctx_out)
        return AVERROR(ENOMEM);

    AVHWFramesContext *frames_ctx = (AVHWFramesContext *)s->hw_frames_ctx_out->data;
    AVHWFramesContext *in_frames_ctx = (AVHWFramesContext *)inl->hw_frames_ctx->data;

    if (s->format == AV_PIX_FMT_NONE) {
        /* If format is not specified, use the same format as input */
        frames_ctx->sw_format = in_frames_ctx->sw_format;
        s->format = in_frames_ctx->sw_format;
        av_log(ctx, AV_LOG_VERBOSE, "D3D12 scale output format not specified, using input format: %s\n",
               av_get_pix_fmt_name(s->format));
    } else {
        frames_ctx->sw_format = s->format;
    }

    /* Set output format based on sw_format */
    switch (frames_ctx->sw_format) {
    case AV_PIX_FMT_NV12:
        s->output_format = DXGI_FORMAT_NV12;
        break;
    case AV_PIX_FMT_P010:
        s->output_format = DXGI_FORMAT_P010;
        break;
    default:
        av_log(ctx, AV_LOG_ERROR, "Unsupported output format: %s\n",
               av_get_pix_fmt_name(frames_ctx->sw_format));
        av_buffer_unref(&s->hw_frames_ctx_out);
        return AVERROR(EINVAL);
    }

    frames_ctx->width = s->width;
    frames_ctx->height = s->height;
    frames_ctx->format = AV_PIX_FMT_D3D12;
    frames_ctx->initial_pool_size = 10;

    if (ctx->extra_hw_frames > 0)
        frames_ctx->initial_pool_size += ctx->extra_hw_frames;

    AVD3D12VAFramesContext *frames_hwctx = frames_ctx->hwctx;

    /*
     * Set D3D12 resource flags for video processing
     * ALLOW_RENDER_TARGET is needed for video processor output
     */
    frames_hwctx->format = s->output_format;
    frames_hwctx->resource_flags = D3D12_RESOURCE_FLAG_ALLOW_RENDER_TARGET;
    frames_hwctx->heap_flags = D3D12_HEAP_FLAG_NONE;

    ret = av_hwframe_ctx_init(s->hw_frames_ctx_out);
    if (ret < 0) {
        av_buffer_unref(&s->hw_frames_ctx_out);
        return ret;
    }

    outl->hw_frames_ctx = av_buffer_ref(s->hw_frames_ctx_out);
    if (!outl->hw_frames_ctx)
        return AVERROR(ENOMEM);

    av_log(ctx, AV_LOG_VERBOSE, "D3D12 scale config: %dx%d -> %dx%d\n",
           inlink->w, inlink->h, outlink->w, outlink->h);
    return 0;
}
715 
    ScaleD3D12Context *s = ctx->priv;

    /* NOTE(review): the call that drains the GPU and releases the D3D12
     * objects precedes these unrefs in the original file; not visible here. */

    av_buffer_unref(&s->hw_frames_ctx_out);
    av_buffer_unref(&s->hw_device_ctx);

    /* Free the option-owned expression strings. */
    av_freep(&s->w_expr);
    av_freep(&s->h_expr);
}
727 
/* Single video input pad; each frame is scaled in scale_d3d12_filter_frame(). */
static const AVFilterPad scale_d3d12_inputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .filter_frame = scale_d3d12_filter_frame,
    },
};
735 
    /* Single video output pad; output geometry/pool set in scale_d3d12_config_props(). */
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = scale_d3d12_config_props,
    },
};
743 
744 #define OFFSET(x) offsetof(ScaleD3D12Context, x)
745 #define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
746 
/* Filter options: width/height expressions (default: keep input size) and an
 * optional output pixel format (default: same as input). */
static const AVOption scale_d3d12_options[] = {
    { "w", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, {.str = "iw"}, .flags = FLAGS },
    { "h", "Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, {.str = "ih"}, .flags = FLAGS },
    { "format", "Output video pixel format", OFFSET(format), AV_OPT_TYPE_PIXEL_FMT, { .i64 = AV_PIX_FMT_NONE }, INT_MIN, INT_MAX, .flags=FLAGS },
    { NULL }
};
753 
754 AVFILTER_DEFINE_CLASS(scale_d3d12);
755 
    /* Filter registration: hardware-device filter, hw-frame aware. */
    .p.name = "scale_d3d12",
    .p.description = NULL_IF_CONFIG_SMALL("Scale video using Direct3D12"),
    .priv_size = sizeof(ScaleD3D12Context),
    .p.priv_class = &scale_d3d12_class,
    .init = scale_d3d12_init,
    .uninit = scale_d3d12_uninit,
    .p.flags = AVFILTER_FLAG_HWDEVICE,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};
ScaleD3D12Context::input_width
int input_width
Definition: vf_scale_d3d12.c:58
AVFrame::color_trc
enum AVColorTransferCharacteristic color_trc
Definition: frame.h:682
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:88
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:216
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
ff_vf_scale_d3d12
const FFFilter ff_vf_scale_d3d12
Definition: vf_scale_d3d12.c:756
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
AVColorTransferCharacteristic
AVColorTransferCharacteristic
Color Transfer Characteristic.
Definition: pixfmt.h:666
out
FILE * out
Definition: movenc.c:55
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1067
AVFrame::duration
int64_t duration
Duration of the frame, in the same units as pts.
Definition: frame.h:775
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
scale_d3d12_uninit
static av_cold void scale_d3d12_uninit(AVFilterContext *ctx)
Definition: vf_scale_d3d12.c:716
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:200
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
scale_d3d12_outputs
static const AVFilterPad scale_d3d12_outputs[]
Definition: vf_scale_d3d12.c:736
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:64
scale_d3d12_inputs
static const AVFilterPad scale_d3d12_inputs[]
Definition: vf_scale_d3d12.c:728
AVFrame::colorspace
enum AVColorSpace colorspace
YUV colorspace type.
Definition: frame.h:689
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:337
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: filters.h:263
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:427
pixdesc.h
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:263
ScaleD3D12Context::format
enum AVPixelFormat format
Definition: vf_scale_d3d12.c:37
AVOption
AVOption.
Definition: opt.h:429
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:226
ff_scale_eval_dimensions
int ff_scale_eval_dimensions(void *log_ctx, const char *w_expr, const char *h_expr, AVFilterLink *inlink, AVFilterLink *outlink, int *ret_w, int *ret_h)
Parse and evaluate string expressions for width and height.
Definition: scale_eval.c:57
get_input_framerate
static AVRational get_input_framerate(AVFilterContext *ctx, AVFilterLink *inlink, AVFrame *in)
Definition: vf_scale_d3d12.c:156
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:220
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:220
video.h
AVCOL_SPC_BT2020_CL
@ AVCOL_SPC_BT2020_CL
ITU-R BT2020 constant luminance system.
Definition: pixfmt.h:712
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:448
ScaleD3D12Context::video_processor
ID3D12VideoProcessor * video_processor
Definition: vf_scale_d3d12.c:42
ScaleD3D12Context::w_expr
char * w_expr
Definition: vf_scale_d3d12.c:35
ScaleD3D12Context::fence_value
UINT64 fence_value
Definition: vf_scale_d3d12.c:49
ScaleD3D12Context::input_framerate
AVRational input_framerate
Definition: vf_scale_d3d12.c:64
AVCOL_SPC_BT470BG
@ AVCOL_SPC_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601
Definition: pixfmt.h:706
fail
#define fail()
Definition: checkasm.h:214
scale_d3d12_configure_processor
static int scale_d3d12_configure_processor(ScaleD3D12Context *s, AVFilterContext *ctx)
Definition: vf_scale_d3d12.c:182
av_reduce
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
scale_d3d12_config_props
static int scale_d3d12_config_props(AVFilterLink *outlink)
Definition: vf_scale_d3d12.c:601
ScaleD3D12Context::video_device
ID3D12VideoDevice * video_device
Definition: vf_scale_d3d12.c:41
AVFilterPad
A filter pad used for either input or output.
Definition: filters.h:39
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:63
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:52
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:210
av_cold
#define av_cold
Definition: attributes.h:106
AVHWFramesContext::height
int height
Definition: hwcontext.h:220
FFFilter
Definition: filters.h:266
scale_d3d12_filter_frame
static int scale_d3d12_filter_frame(AVFilterLink *inlink, AVFrame *in)
Definition: vf_scale_d3d12.c:360
s
#define s(width, name)
Definition: cbs_vp9.c:198
ScaleD3D12Context::command_queue
ID3D12CommandQueue * command_queue
Definition: vf_scale_d3d12.c:43
AVCOL_SPC_SMPTE170M
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
Definition: pixfmt.h:707
ScaleD3D12Context
Definition: vf_scale_d3d12.c:33
OFFSET
#define OFFSET(x)
Definition: vf_scale_d3d12.c:744
AVD3D12VAFrame::sync_ctx
AVD3D12VASyncContext sync_ctx
The sync context for the texture.
Definition: hwcontext_d3d12va.h:159
filters.h
ctx
AVFormatContext * ctx
Definition: movenc.c:49
ScaleD3D12Context::process_support
D3D12_FEATURE_DATA_VIDEO_PROCESS_SUPPORT process_support
Definition: vf_scale_d3d12.c:67
AVD3D12VASyncContext::fence
ID3D12Fence * fence
D3D12 fence object.
Definition: hwcontext_d3d12va.h:108
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: filters.h:264
get_dxgi_colorspace
static DXGI_COLOR_SPACE_TYPE get_dxgi_colorspace(enum AVColorSpace colorspace, enum AVColorTransferCharacteristic trc, int is_10bit)
Definition: vf_scale_d3d12.c:128
ScaleD3D12Context::h_expr
char * h_expr
Definition: vf_scale_d3d12.c:36
if
if(ret)
Definition: filter_design.txt:179
ScaleD3D12Context::command_allocator
ID3D12CommandAllocator * command_allocator
Definition: vf_scale_d3d12.c:45
framerate
float framerate
Definition: av1_levels.c:29
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:76
ScaleD3D12Context::width
int width
Definition: vf_scale_d3d12.c:57
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:213
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:599
format
New swscale design: SwsGraph is what coordinates multiple passes. These can include cascaded scaling, error diffusion, and so on. Or we could have separate passes for the vertical and horizontal scaling. In between each SwsPass lies a fully allocated image buffer. Graph passes may have different levels of threading, e.g. we can have a single-threaded error diffusion pass following a multi-threaded scaling pass. SwsGraph is internally recreated whenever the image format changes.
Definition: swscale-v2.txt:14
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AVD3D12VAFramesContext::heap_flags
D3D12_HEAP_FLAGS heap_flags
Options for working with heaps allocation when creating resources.
Definition: hwcontext_d3d12va.h:193
ScaleD3D12Context::classCtx
const AVClass * classCtx
Definition: vf_scale_d3d12.c:34
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVHWFramesContext::device_ref
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:129
ScaleD3D12Context::output_format
DXGI_FORMAT output_format
Definition: vf_scale_d3d12.c:60
ScaleD3D12Context::fence_event
HANDLE fence_event
Definition: vf_scale_d3d12.c:50
AVD3D12VAFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_d3d12va.h:172
AV_PIX_FMT_D3D12
@ AV_PIX_FMT_D3D12
Hardware surfaces for Direct3D 12.
Definition: pixfmt.h:440
AVD3D12VAFrame::texture
ID3D12Resource * texture
The texture in which the frame is located.
Definition: hwcontext_d3d12va.h:144
hwcontext_d3d12va.h
AVD3D12VAFramesContext::resource_flags
D3D12_RESOURCE_FLAGS resource_flags
Options for working with resources.
Definition: hwcontext_d3d12va.h:185
ff_filter_link
static FilterLink * ff_filter_link(AVFilterLink *link)
Definition: filters.h:198
ScaleD3D12Context::input_colorspace
DXGI_COLOR_SPACE_TYPE input_colorspace
Definition: vf_scale_d3d12.c:63
FF_FILTER_FLAG_HWFRAME_AWARE
#define FF_FILTER_FLAG_HWFRAME_AWARE
The filter is aware of hardware frames, and any hardware frame context should not be automatically pr...
Definition: filters.h:207
ScaleD3D12Context::hw_frames_ctx_out
AVBufferRef * hw_frames_ctx_out
Definition: vf_scale_d3d12.c:54
ScaleD3D12Context::device
ID3D12Device * device
Definition: vf_scale_d3d12.c:40
AVCOL_TRC_SMPTE2084
@ AVCOL_TRC_SMPTE2084
SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems.
Definition: pixfmt.h:683
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:94
AVFILTER_FLAG_HWDEVICE
#define AVFILTER_FLAG_HWDEVICE
The filter can create hardware frames using AVFilterContext.hw_device_ctx.
Definition: avfilter.h:188
scale_eval.h
output_frame
static int output_frame(H264Context *h, AVFrame *dst, H264Picture *srcp)
Definition: h264dec.c:861
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:59
AVD3D12VAFrame
D3D12VA frame descriptor for pool allocation.
Definition: hwcontext_d3d12va.h:138
FLAGS
#define FLAGS
Definition: vf_scale_d3d12.c:745
AVD3D12VADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_d3d12va.h:43
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
AVCOL_SPC_BT2020_NCL
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:711
release_d3d12_resources
static void release_d3d12_resources(ScaleD3D12Context *s)
Definition: vf_scale_d3d12.c:74
AVColorSpace
AVColorSpace
YUV colorspace type.
Definition: pixfmt.h:700
ScaleD3D12Context::command_list
ID3D12VideoProcessCommandList * command_list
Definition: vf_scale_d3d12.c:44
AVFilterPad::name
const char * name
Pad name.
Definition: filters.h:45
AVD3D12VAFramesContext::format
DXGI_FORMAT format
DXGI_FORMAT format.
Definition: hwcontext_d3d12va.h:177
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:118
ScaleD3D12Context::input_height
int input_height
Definition: vf_scale_d3d12.c:58
ret
ret
Definition: filter_design.txt:187
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:75
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:153
ScaleD3D12Context::hw_device_ctx
AVBufferRef * hw_device_ctx
Definition: vf_scale_d3d12.c:53
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:724
AVCOL_TRC_ARIB_STD_B67
@ AVCOL_TRC_ARIB_STD_B67
ARIB STD-B67, known as "Hybrid log-gamma".
Definition: pixfmt.h:687
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
ScaleD3D12Context::fence
ID3D12Fence * fence
Definition: vf_scale_d3d12.c:48
Windows::Graphics::DirectX::Direct3D11::p
IDirect3DDxgiInterfaceAccess _COM_Outptr_ void ** p
Definition: vsrc_gfxcapture_winrt.hpp:53
AV_OPT_TYPE_PIXEL_FMT
@ AV_OPT_TYPE_PIXEL_FMT
Underlying C type is enum AVPixelFormat.
Definition: opt.h:307
AVFilterContext
An instance of a filter.
Definition: avfilter.h:274
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:602
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:190
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:200
FFFilter::p
AVFilter p
The public AVFilter.
Definition: filters.h:270
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
ScaleD3D12Context::input_format
DXGI_FORMAT input_format
Definition: vf_scale_d3d12.c:59
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
WaitForSingleObject
#define WaitForSingleObject(a, b)
Definition: w32pthreads.h:64
scale_d3d12_init
static av_cold int scale_d3d12_init(AVFilterContext *ctx)
Definition: vf_scale_d3d12.c:70
hwcontext.h
scale_d3d12_options
static const AVOption scale_d3d12_options[]
Definition: vf_scale_d3d12.c:747
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(scale_d3d12)
AVD3D12VASyncContext::fence_value
uint64_t fence_value
The fence value used for sync.
Definition: hwcontext_d3d12va.h:119
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Underlying C type is a uint8_t* that is either NULL or points to a C string allocated with the av_mal...
Definition: opt.h:276
hwcontext_d3d12va_internal.h
av_hwframe_get_buffer
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
Definition: hwcontext.c:506
FILTER_SINGLE_PIXFMT
#define FILTER_SINGLE_PIXFMT(pix_fmt_)
Definition: filters.h:253
ScaleD3D12Context::height
int height
Definition: vf_scale_d3d12.c:57
ff_scale_adjust_dimensions
int ff_scale_adjust_dimensions(AVFilterLink *inlink, int *ret_w, int *ret_h, int force_original_aspect_ratio, int force_divisible_by, double w_adj)
Transform evaluated width and height obtained from ff_scale_eval_dimensions into actual target width ...
Definition: scale_eval.c:113
w32dlfcn.h
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:3376