FFmpeg
vf_scale_d3d11.c
Go to the documentation of this file.
1 /*
2  * Copyright (C) 2025 MulticorewWare, Inc.
3  *
4  * Authors: Dash Santosh <dash.sathanatayanan@multicorewareinc.com>
5  * Sachin <sachin.prakash@multicorewareinc.com>
6  *
7  * This file is part of FFmpeg.
8  *
9  * FFmpeg is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 2.1 of the License, or (at your option) any later version.
13  *
14  * FFmpeg is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with FFmpeg; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  */
23 
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "compat/w32dlfcn.h"

#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_d3d11va.h"

#include "filters.h"
#include "scale_eval.h"
#include "video.h"
34 
35 typedef struct ScaleD3D11Context {
36  const AVClass *classCtx;
37  char *w_expr;
38  char *h_expr;
40 
41  ///< D3D11 objects
43  ID3D11DeviceContext *context;
44  ID3D11VideoDevice *videoDevice;
45  ID3D11VideoProcessor *processor;
46  ID3D11VideoProcessorEnumerator *enumerator;
47  ID3D11VideoProcessorOutputView *outputView;
48  ID3D11VideoProcessorInputView *inputView;
49 
50  ///< Buffer references
53 
54  ///< Dimensions and formats
55  int width, height;
57  DXGI_FORMAT input_format;
58  DXGI_FORMAT output_format;
60 
62  ///< all real work is done in config_props and filter_frame
63  return 0;
64 }
65 
67  if (s->outputView) {
68  s->outputView->lpVtbl->Release(s->outputView);
69  s->outputView = NULL;
70  }
71 
72  if (s->processor) {
73  s->processor->lpVtbl->Release(s->processor);
74  s->processor = NULL;
75  }
76 
77  if (s->enumerator) {
78  s->enumerator->lpVtbl->Release(s->enumerator);
79  s->enumerator = NULL;
80  }
81 
82  if (s->videoDevice) {
83  s->videoDevice->lpVtbl->Release(s->videoDevice);
84  s->videoDevice = NULL;
85  }
86 }
87 
89  HRESULT hr;
90 
91  switch (s->format) {
92  case AV_PIX_FMT_NV12:
93  s->output_format = DXGI_FORMAT_NV12;
94  break;
95  case AV_PIX_FMT_P010:
96  s->output_format = DXGI_FORMAT_P010;
97  break;
98  default:
99  av_log(ctx, AV_LOG_ERROR, "Invalid output format specified\n");
100  return AVERROR(EINVAL);
101  }
102 
103  ///< Get D3D11 device and context from hardware device context
104  AVHWDeviceContext *hwctx = (AVHWDeviceContext *)s->hw_device_ctx->data;
105  AVD3D11VADeviceContext *d3d11_hwctx = (AVD3D11VADeviceContext *)hwctx->hwctx;
106  s->device = d3d11_hwctx->device;
107  s->context = d3d11_hwctx->device_context;
108 
109  av_log(ctx, AV_LOG_VERBOSE, "Configuring D3D11 video processor: %dx%d -> %dx%d\n",
110  s->inputWidth, s->inputHeight, s->width, s->height);
111 
112  ///< Define the video processor content description
113  D3D11_VIDEO_PROCESSOR_CONTENT_DESC contentDesc = {
114  .InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE,
115  .InputWidth = s->inputWidth,
116  .InputHeight = s->inputHeight,
117  .OutputWidth = s->width,
118  .OutputHeight = s->height,
119  .Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL,
120  };
121 
122  ///< Query video device interface
123  hr = s->device->lpVtbl->QueryInterface(s->device, &IID_ID3D11VideoDevice, (void **)&s->videoDevice);
124  if (FAILED(hr)) {
125  av_log(ctx, AV_LOG_ERROR, "Failed to get D3D11 video device interface: HRESULT 0x%lX\n", hr);
126  return AVERROR_EXTERNAL;
127  }
128 
129  ///< Create video processor enumerator
130  hr = s->videoDevice->lpVtbl->CreateVideoProcessorEnumerator(s->videoDevice, &contentDesc, &s->enumerator);
131  if (FAILED(hr)) {
132  av_log(ctx, AV_LOG_ERROR, "Failed to create video processor enumerator: HRESULT 0x%lX\n", hr);
133  return AVERROR_EXTERNAL;
134  }
135 
136  ///< Create the video processor
137  hr = s->videoDevice->lpVtbl->CreateVideoProcessor(s->videoDevice, s->enumerator, 0, &s->processor);
138  if (FAILED(hr)) {
139  av_log(ctx, AV_LOG_ERROR, "Failed to create video processor: HRESULT 0x%lX\n", hr);
140  return AVERROR_EXTERNAL;
141  }
142 
143  av_log(ctx, AV_LOG_VERBOSE, "D3D11 video processor successfully configured\n");
144  return 0;
145 }
146 
148 {
149  AVFilterContext *ctx = inlink->dst;
150  ScaleD3D11Context *s = ctx->priv;
151  AVFilterLink *outlink = ctx->outputs[0];
152  ID3D11VideoProcessorInputView *inputView = NULL;
153  ID3D11VideoContext *videoContext = NULL;
154  AVFrame *out = NULL;
155  int ret = 0;
156  HRESULT hr;
157 
158  ///< Validate input frame
159  if (!in) {
160  av_log(ctx, AV_LOG_ERROR, "Null input frame\n");
161  return AVERROR(EINVAL);
162  }
163 
164  if (!in->hw_frames_ctx) {
165  av_log(ctx, AV_LOG_ERROR, "No hardware frames context in input frame\n");
166  av_frame_free(&in);
167  return AVERROR(EINVAL);
168  }
169 
170  ///< Verify hardware device contexts
172 
173  if (!s->hw_device_ctx) {
174  av_log(ctx, AV_LOG_ERROR, "Filter hardware device context is uninitialized\n");
175  av_frame_free(&in);
176  return AVERROR(EINVAL);
177  }
178 
179  AVHWDeviceContext *input_device_ctx = (AVHWDeviceContext *)frames_ctx->device_ref->data;
180  AVHWDeviceContext *filter_device_ctx = (AVHWDeviceContext *)s->hw_device_ctx->data;
181 
182  if (input_device_ctx->type != filter_device_ctx->type) {
183  av_log(ctx, AV_LOG_ERROR, "Mismatch between input and filter hardware device types\n");
184  av_frame_free(&in);
185  return AVERROR(EINVAL);
186  }
187 
188  ///< Allocate output frame
189  out = av_frame_alloc();
190  if (!out) {
191  av_log(ctx, AV_LOG_ERROR, "Failed to allocate output frame\n");
192  av_frame_free(&in);
193  return AVERROR(ENOMEM);
194  }
195 
196  ret = av_hwframe_get_buffer(s->hw_frames_ctx_out, out, 0);
197  if (ret < 0) {
198  av_log(ctx, AV_LOG_ERROR, "Failed to get output frame from pool\n");
199  goto fail;
200  }
201 
202  ///< Configure the D3D11 video processor if not already configured
203  if (!s->processor) {
204  ///< Get info from input texture
205  D3D11_TEXTURE2D_DESC textureDesc;
206  ID3D11Texture2D *input_texture = (ID3D11Texture2D *)in->data[0];
207  input_texture->lpVtbl->GetDesc(input_texture, &textureDesc);
208 
209  s->inputWidth = textureDesc.Width;
210  s->inputHeight = textureDesc.Height;
211  s->input_format = textureDesc.Format;
212 
214  if (ret < 0) {
215  av_log(ctx, AV_LOG_ERROR, "Failed to configure processor\n");
216  goto fail;
217  }
218  }
219 
220  ///< Get input texture and prepare input view
221  ID3D11Texture2D *d3d11_texture = (ID3D11Texture2D *)in->data[0];
222  int subIdx = (int)(intptr_t)in->data[1];
223 
224  D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputViewDesc = {
225  .FourCC = s->input_format,
226  .ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D,
227  .Texture2D.ArraySlice = subIdx
228  };
229 
230  hr = s->videoDevice->lpVtbl->CreateVideoProcessorInputView(
231  s->videoDevice, (ID3D11Resource *)d3d11_texture, s->enumerator, &inputViewDesc, &inputView);
232  if (FAILED(hr)) {
233  av_log(ctx, AV_LOG_ERROR, "Failed to create input view: HRESULT 0x%lX\n", hr);
235  goto fail;
236  }
237 
238  ///< Create output view for current texture
239  ID3D11Texture2D *output_texture = (ID3D11Texture2D *)out->data[0];
240  D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outputViewDesc = {
241  .ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D,
242  .Texture2D = { .MipSlice = 0 },
243  };
244 
245  hr = s->videoDevice->lpVtbl->CreateVideoProcessorOutputView(
246  s->videoDevice, (ID3D11Resource *)output_texture, s->enumerator, &outputViewDesc, &s->outputView);
247  if (FAILED(hr)) {
248  av_log(ctx, AV_LOG_ERROR, "Failed to create output view: HRESULT 0x%lX\n", hr);
250  goto fail;
251  }
252 
253  ///< Set up processing stream
254  D3D11_VIDEO_PROCESSOR_STREAM stream = {
255  .Enable = TRUE,
256  .pInputSurface = inputView,
257  .OutputIndex = 0
258  };
259 
260  ///< Get video context
261  hr = s->context->lpVtbl->QueryInterface(s->context, &IID_ID3D11VideoContext, (void **)&videoContext);
262  if (FAILED(hr)) {
263  av_log(ctx, AV_LOG_ERROR, "Failed to get video context: HRESULT 0x%lX\n", hr);
265  goto fail;
266  }
267 
268  ///< Process the frame
269  hr = videoContext->lpVtbl->VideoProcessorBlt(videoContext, s->processor, s->outputView, 0, 1, &stream);
270  if (FAILED(hr)) {
271  av_log(ctx, AV_LOG_ERROR, "VideoProcessorBlt failed: HRESULT 0x%lX\n", hr);
273  goto fail;
274  }
275 
276  ///< Set up output frame
277  ret = av_frame_copy_props(out, in);
278  if (ret < 0) {
279  av_log(ctx, AV_LOG_ERROR, "Failed to copy frame properties\n");
280  goto fail;
281  }
282 
283  out->data[0] = (uint8_t *)output_texture;
284  out->data[1] = (uint8_t *)(intptr_t)0;
285  out->width = s->width;
286  out->height = s->height;
287  out->format = AV_PIX_FMT_D3D11;
288 
289  ///< Clean up resources
290  inputView->lpVtbl->Release(inputView);
291  videoContext->lpVtbl->Release(videoContext);
292  if (s->outputView) {
293  s->outputView->lpVtbl->Release(s->outputView);
294  s->outputView = NULL;
295  }
296  av_frame_free(&in);
297 
298  ///< Forward the frame
299  return ff_filter_frame(outlink, out);
300 
301 fail:
302  if (inputView)
303  inputView->lpVtbl->Release(inputView);
304  if (videoContext)
305  videoContext->lpVtbl->Release(videoContext);
306  if (s->outputView) {
307  s->outputView->lpVtbl->Release(s->outputView);
308  s->outputView = NULL;
309  }
310  av_frame_free(&in);
311  av_frame_free(&out);
312  return ret;
313 }
314 
316 {
317  AVFilterContext *ctx = outlink->src;
318  ScaleD3D11Context *s = ctx->priv;
319  AVFilterLink *inlink = ctx->inputs[0];
321  FilterLink *outl = ff_filter_link(outlink);
322  int ret;
323 
324  ///< Clean up any previous resources
326 
327  ///< Evaluate output dimensions
328  ret = ff_scale_eval_dimensions(s, s->w_expr, s->h_expr, inlink, outlink, &s->width, &s->height);
329  if (ret < 0) {
330  av_log(ctx, AV_LOG_ERROR, "Failed to evaluate dimensions\n");
331  return ret;
332  }
333 
334  outlink->w = s->width;
335  outlink->h = s->height;
336 
337  ///< Validate input hw_frames_ctx
338  if (!inl->hw_frames_ctx) {
339  av_log(ctx, AV_LOG_ERROR, "No hw_frames_ctx available on input link\n");
340  return AVERROR(EINVAL);
341  }
342 
343  ///< Propagate hw_frames_ctx to output
345  if (!outl->hw_frames_ctx) {
346  av_log(ctx, AV_LOG_ERROR, "Failed to propagate hw_frames_ctx to output\n");
347  return AVERROR(ENOMEM);
348  }
349 
350  ///< Initialize filter's hardware device context
351  if (!s->hw_device_ctx) {
352  AVHWFramesContext *in_frames_ctx = (AVHWFramesContext *)inl->hw_frames_ctx->data;
353  s->hw_device_ctx = av_buffer_ref(in_frames_ctx->device_ref);
354  if (!s->hw_device_ctx) {
355  av_log(ctx, AV_LOG_ERROR, "Failed to initialize filter hardware device context\n");
356  return AVERROR(ENOMEM);
357  }
358  }
359 
360  ///< Get D3D11 device and context (but don't initialize processor yet - done in filter_frame)
361  AVHWDeviceContext *hwctx = (AVHWDeviceContext *)s->hw_device_ctx->data;
362  AVD3D11VADeviceContext *d3d11_hwctx = (AVD3D11VADeviceContext *)hwctx->hwctx;
363 
364  s->device = d3d11_hwctx->device;
365  s->context = d3d11_hwctx->device_context;
366 
367  if (!s->device || !s->context) {
368  av_log(ctx, AV_LOG_ERROR, "Failed to get valid D3D11 device or context\n");
369  return AVERROR(EINVAL);
370  }
371 
372  ///< Create new hardware frames context for output
373  s->hw_frames_ctx_out = av_hwframe_ctx_alloc(s->hw_device_ctx);
374  if (!s->hw_frames_ctx_out)
375  return AVERROR(ENOMEM);
376 
377  AVHWFramesContext *frames_ctx = (AVHWFramesContext *)s->hw_frames_ctx_out->data;
378  frames_ctx->format = AV_PIX_FMT_D3D11;
379  frames_ctx->sw_format = s->format;
380  frames_ctx->width = s->width;
381  frames_ctx->height = s->height;
382  frames_ctx->initial_pool_size = 10;
383 
384  if (ctx->extra_hw_frames > 0)
385  frames_ctx->initial_pool_size += ctx->extra_hw_frames;
386 
387  AVD3D11VAFramesContext *frames_hwctx = frames_ctx->hwctx;
388  frames_hwctx->MiscFlags = 0;
389  frames_hwctx->BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_VIDEO_ENCODER;
390 
391  ret = av_hwframe_ctx_init(s->hw_frames_ctx_out);
392  if (ret < 0) {
393  av_buffer_unref(&s->hw_frames_ctx_out);
394  return ret;
395  }
396 
397  outl->hw_frames_ctx = av_buffer_ref(s->hw_frames_ctx_out);
398  if (!outl->hw_frames_ctx)
399  return AVERROR(ENOMEM);
400 
401  av_log(ctx, AV_LOG_VERBOSE, "D3D11 scale config: %dx%d -> %dx%d\n",
402  inlink->w, inlink->h, outlink->w, outlink->h);
403  return 0;
404 }
405 
407  ScaleD3D11Context *s = ctx->priv;
408 
409  ///< Release D3D11 resources
411 
412  ///< Free the hardware device context reference
413  av_buffer_unref(&s->hw_frames_ctx_out);
414  av_buffer_unref(&s->hw_device_ctx);
415 
416  ///< Free option strings
417  av_freep(&s->w_expr);
418  av_freep(&s->h_expr);
419 }
420 
421 static const AVFilterPad scale_d3d11_inputs[] = {
422  {
423  .name = "default",
424  .type = AVMEDIA_TYPE_VIDEO,
425  .filter_frame = scale_d3d11_filter_frame,
426  },
427 };
428 
430  {
431  .name = "default",
432  .type = AVMEDIA_TYPE_VIDEO,
433  .config_props = scale_d3d11_config_props,
434  },
435 };
436 
437 #define OFFSET(x) offsetof(ScaleD3D11Context, x)
438 #define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
439 
440 static const AVOption scale_d3d11_options[] = {
441  { "width", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, {.str = "iw"}, .flags = FLAGS },
442  { "height", "Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, {.str = "ih"}, .flags = FLAGS },
443  { "format", "Output video pixel format", OFFSET(format), AV_OPT_TYPE_PIXEL_FMT, { .i64 = AV_PIX_FMT_NONE }, INT_MIN, INT_MAX, .flags=FLAGS },
444  { NULL }
445 };
446 
447 AVFILTER_DEFINE_CLASS(scale_d3d11);
448 
450  .p.name = "scale_d3d11",
451  .p.description = NULL_IF_CONFIG_SMALL("Scale video using Direct3D11"),
452  .priv_size = sizeof(ScaleD3D11Context),
453  .p.priv_class = &scale_d3d11_class,
454  .init = scale_d3d11_init,
455  .uninit = scale_d3d11_uninit,
459  .p.flags = AVFILTER_FLAG_HWDEVICE,
460  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
461 };
ScaleD3D11Context
Definition: vf_scale_d3d11.c:35
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:88
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
scale_d3d11_options
static const AVOption scale_d3d11_options[]
Definition: vf_scale_d3d11.c:440
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(scale_d3d11)
out
FILE * out
Definition: movenc.c:55
ScaleD3D11Context::output_format
DXGI_FORMAT output_format
Definition: vf_scale_d3d11.c:58
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1062
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:200
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:63
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:337
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: filters.h:263
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:421
pixdesc.h
AVD3D11VAFramesContext::MiscFlags
UINT MiscFlags
D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation.
Definition: hwcontext_d3d11va.h:166
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:263
AVOption
AVOption.
Definition: opt.h:429
scale_d3d11_filter_frame
static int scale_d3d11_filter_frame(AVFilterLink *inlink, AVFrame *in)
Definition: vf_scale_d3d11.c:147
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:226
ff_scale_eval_dimensions
int ff_scale_eval_dimensions(void *log_ctx, const char *w_expr, const char *h_expr, AVFilterLink *inlink, AVFilterLink *outlink, int *ret_w, int *ret_h)
Parse and evaluate string expressions for width and height.
Definition: scale_eval.c:57
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:215
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:220
video.h
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:442
scale_d3d11_init
static av_cold int scale_d3d11_init(AVFilterContext *ctx)
Definition: vf_scale_d3d11.c:61
AVD3D11VAFramesContext::BindFlags
UINT BindFlags
D3D11_TEXTURE2D_DESC.BindFlags used for texture creation.
Definition: hwcontext_d3d11va.h:160
OFFSET
#define OFFSET(x)
Definition: vf_scale_d3d11.c:437
fail
#define fail()
Definition: checkasm.h:199
ScaleD3D11Context::device
ID3D11Device * device
Definition: vf_scale_d3d11.c:42
ScaleD3D11Context::classCtx
const AVClass * classCtx
Definition: vf_scale_d3d11.c:36
AVFilterPad
A filter pad used for either input or output.
Definition: filters.h:39
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:63
ScaleD3D11Context::input_format
DXGI_FORMAT input_format
Definition: vf_scale_d3d11.c:57
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:51
scale_d3d11_configure_processor
static int scale_d3d11_configure_processor(ScaleD3D11Context *s, AVFilterContext *ctx)
Definition: vf_scale_d3d11.c:88
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:210
av_cold
#define av_cold
Definition: attributes.h:90
scale_d3d11_outputs
static const AVFilterPad scale_d3d11_outputs[]
Definition: vf_scale_d3d11.c:429
AVHWFramesContext::height
int height
Definition: hwcontext.h:220
FFFilter
Definition: filters.h:266
ScaleD3D11Context::processor
ID3D11VideoProcessor * processor
Definition: vf_scale_d3d11.c:45
s
#define s(width, name)
Definition: cbs_vp9.c:198
ScaleD3D11Context::context
ID3D11DeviceContext * context
Definition: vf_scale_d3d11.c:43
filters.h
ScaleD3D11Context::format
enum AVPixelFormat format
D3D11 objects.
Definition: vf_scale_d3d11.c:39
ctx
AVFormatContext * ctx
Definition: movenc.c:49
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: filters.h:264
if
if(ret)
Definition: filter_design.txt:179
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:76
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:213
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:597
format
New swscale design to change SwsGraph is what coordinates multiple passes These can include cascaded scaling error diffusion and so on Or we could have separate passes for the vertical and horizontal scaling In between each SwsPass lies a fully allocated image buffer Graph passes may have different levels of e g we can have a single threaded error diffusion pass following a multi threaded scaling pass SwsGraph is internally recreated whenever the image format
Definition: swscale-v2.txt:14
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AVHWFramesContext::device_ref
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:129
ScaleD3D11Context::videoDevice
ID3D11VideoDevice * videoDevice
Definition: vf_scale_d3d11.c:44
ff_vf_scale_d3d11
const FFFilter ff_vf_scale_d3d11
Definition: vf_scale_d3d11.c:449
AVD3D11VAFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_d3d11va.h:131
ff_filter_link
static FilterLink * ff_filter_link(AVFilterLink *link)
Definition: filters.h:198
scale_d3d11_uninit
static av_cold void scale_d3d11_uninit(AVFilterContext *ctx)
Definition: vf_scale_d3d11.c:406
FF_FILTER_FLAG_HWFRAME_AWARE
#define FF_FILTER_FLAG_HWFRAME_AWARE
The filter is aware of hardware frames, and any hardware frame context should not be automatically pr...
Definition: filters.h:207
scale_d3d11_config_props
static int scale_d3d11_config_props(AVFilterLink *outlink)
Definition: vf_scale_d3d11.c:315
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:94
AVFILTER_FLAG_HWDEVICE
#define AVFILTER_FLAG_HWDEVICE
The filter can create hardware frames using AVFilterContext.hw_device_ctx.
Definition: avfilter.h:183
ScaleD3D11Context::inputHeight
int inputHeight
Definition: vf_scale_d3d11.c:56
scale_eval.h
ScaleD3D11Context::inputWidth
int inputWidth
Definition: vf_scale_d3d11.c:56
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:59
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
ScaleD3D11Context::w_expr
char * w_expr
Definition: vf_scale_d3d11.c:37
FLAGS
#define FLAGS
Definition: vf_scale_d3d11.c:438
ScaleD3D11Context::height
int height
Definition: vf_scale_d3d11.c:55
AVD3D11VADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_d3d11va.h:45
ScaleD3D11Context::outputView
ID3D11VideoProcessorOutputView * outputView
Definition: vf_scale_d3d11.c:47
AVFilterPad::name
const char * name
Pad name.
Definition: filters.h:45
ScaleD3D11Context::hw_frames_ctx_out
AVBufferRef * hw_frames_ctx_out
Dimensions and formats.
Definition: vf_scale_d3d11.c:52
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:118
ret
ret
Definition: filter_design.txt:187
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:75
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:153
ScaleD3D11Context::width
int width
Definition: vf_scale_d3d11.c:55
ScaleD3D11Context::enumerator
ID3D11VideoProcessorEnumerator * enumerator
Definition: vf_scale_d3d11.c:46
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:718
release_d3d11_resources
static void release_d3d11_resources(ScaleD3D11Context *s)
Definition: vf_scale_d3d11.c:66
scale_d3d11_inputs
static const AVFilterPad scale_d3d11_inputs[]
Definition: vf_scale_d3d11.c:421
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
ScaleD3D11Context::hw_device_ctx
AVBufferRef * hw_device_ctx
Definition: vf_scale_d3d11.c:51
AV_OPT_TYPE_PIXEL_FMT
@ AV_OPT_TYPE_PIXEL_FMT
Underlying C type is enum AVPixelFormat.
Definition: opt.h:307
AVFilterContext
An instance of a filter.
Definition: avfilter.h:269
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:602
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:190
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:200
FFFilter::p
AVFilter p
The public AVFilter.
Definition: filters.h:270
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
ScaleD3D11Context::inputView
ID3D11VideoProcessorInputView * inputView
Buffer references.
Definition: vf_scale_d3d11.c:48
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
ScaleD3D11Context::h_expr
char * h_expr
Definition: vf_scale_d3d11.c:38
hwcontext.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
ID3D11Device
void ID3D11Device
Definition: nvenc.h:28
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Underlying C type is a uint8_t* that is either NULL or points to a C string allocated with the av_mal...
Definition: opt.h:276
av_hwframe_get_buffer
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
Definition: hwcontext.c:506
FILTER_SINGLE_PIXFMT
#define FILTER_SINGLE_PIXFMT(pix_fmt_)
Definition: filters.h:253
hwcontext_d3d11va.h
w32dlfcn.h