FFmpeg
hwcontext_d3d11va.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #include <windows.h>
22 
23 #define COBJMACROS
24 
25 #include <initguid.h>
26 #include <d3d11.h>
27 #include <dxgi1_2.h>
28 
29 #if HAVE_DXGIDEBUG_H
30 #include <dxgidebug.h>
31 #endif
32 
33 #include "avassert.h"
34 #include "common.h"
35 #include "hwcontext.h"
36 #include "hwcontext_d3d11va.h"
37 #include "hwcontext_internal.h"
38 #include "imgutils.h"
39 #include "mem.h"
40 #include "pixdesc.h"
41 #include "pixfmt.h"
42 #include "thread.h"
43 #include "compat/w32dlfcn.h"
44 
45 #define MAX_ARRAY_SIZE 64 // Driver specification limits ArraySize to 64 for decoder-bound resources
46 
47 typedef HRESULT(WINAPI *PFN_CREATE_DXGI_FACTORY)(REFIID riid, void **ppFactory);
48 
49 static AVOnce functions_loaded = AV_ONCE_INIT;
50 
51 static PFN_CREATE_DXGI_FACTORY mCreateDXGIFactory;
52 static PFN_D3D11_CREATE_DEVICE mD3D11CreateDevice;
53 
54 static av_cold void load_functions(void)
55 {
56 #if !HAVE_UWP
57  // We let these "leak" - this is fine, as unloading has no great benefit, and
58  // Windows will mark a DLL as loaded forever if its internal refcount overflows
59  // from too many LoadLibrary calls.
60  HANDLE d3dlib, dxgilib;
61 
62  d3dlib = dlopen("d3d11.dll", 0);
63  dxgilib = dlopen("dxgi.dll", 0);
64  if (!d3dlib || !dxgilib)
65  return;
66 
67  mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE) GetProcAddress(d3dlib, "D3D11CreateDevice");
68  mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) GetProcAddress(dxgilib, "CreateDXGIFactory1");
69  if (!mCreateDXGIFactory)
70  mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) GetProcAddress(dxgilib, "CreateDXGIFactory");
71 #else
72  // In UWP (which lacks LoadLibrary), CreateDXGIFactory isn't available,
73  // only CreateDXGIFactory1
74  mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE) D3D11CreateDevice;
75  mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) CreateDXGIFactory1;
76 #endif
77 }
78 
79 typedef struct D3D11VAFramesContext {
80  /**
81  * The public AVD3D11VAFramesContext. See hwcontext_d3d11va.h for it.
82  */
83  AVD3D11VAFramesContext p;
84 
85  int nb_surfaces;
86  int nb_surfaces_used;
87 
88  DXGI_FORMAT format;
89 
90  ID3D11Texture2D *staging_texture;
91 } D3D11VAFramesContext;
92 
93 static const struct {
94  DXGI_FORMAT d3d_format;
95  enum AVPixelFormat pix_fmt;
96 } supported_formats[] = {
97  { DXGI_FORMAT_NV12, AV_PIX_FMT_NV12 },
98  { DXGI_FORMAT_P010, AV_PIX_FMT_P010 },
99  { DXGI_FORMAT_B8G8R8A8_UNORM, AV_PIX_FMT_BGRA },
100  { DXGI_FORMAT_R10G10B10A2_UNORM, AV_PIX_FMT_X2BGR10 },
101  { DXGI_FORMAT_R16G16B16A16_FLOAT, AV_PIX_FMT_RGBAF16 },
102  { DXGI_FORMAT_AYUV, AV_PIX_FMT_VUYX },
103  { DXGI_FORMAT_YUY2, AV_PIX_FMT_YUYV422 },
104  { DXGI_FORMAT_Y210, AV_PIX_FMT_Y210 },
105  { DXGI_FORMAT_Y410, AV_PIX_FMT_XV30 },
106  { DXGI_FORMAT_P016, AV_PIX_FMT_P012 },
107  { DXGI_FORMAT_Y216, AV_PIX_FMT_Y216 },
108  { DXGI_FORMAT_Y416, AV_PIX_FMT_XV48 },
109  // There is no 12-bit pixel format defined in DXGI_FORMAT*, so use the 16-bit formats
110  // to stay compatible with the 12-bit AV_PIX_FMT* formats.
111  { DXGI_FORMAT_Y216, AV_PIX_FMT_Y212 },
112  { DXGI_FORMAT_Y416, AV_PIX_FMT_XV36 },
113  // Special opaque formats. The pix_fmt is merely a placeholder, as the
114  // opaque format cannot be accessed directly.
115  { DXGI_FORMAT_420_OPAQUE, AV_PIX_FMT_YUV420P },
116 };
117 
118 static void d3d11va_default_lock(void *ctx)
119 {
120  WaitForSingleObjectEx(ctx, INFINITE, FALSE);
121 }
122 
123 static void d3d11va_default_unlock(void *ctx)
124 {
125  ReleaseMutex(ctx);
126 }
127 
128 static void d3d11va_frames_uninit(AVHWFramesContext *ctx)
129 {
130  D3D11VAFramesContext *s = ctx->hwctx;
131  AVD3D11VAFramesContext *frames_hwctx = &s->p;
132 
133  if (frames_hwctx->texture)
134  ID3D11Texture2D_Release(frames_hwctx->texture);
135  frames_hwctx->texture = NULL;
136 
137  if (s->staging_texture)
138  ID3D11Texture2D_Release(s->staging_texture);
139  s->staging_texture = NULL;
140 
141  av_freep(&frames_hwctx->texture_infos);
142 }
143 
144 static int d3d11va_frames_get_constraints(AVHWDeviceContext *ctx,
145  const void *hwconfig,
146  AVHWFramesConstraints *constraints)
147 {
148  AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;
149  int nb_sw_formats = 0;
150  HRESULT hr;
151  int i;
152 
153  constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_formats) + 1,
154  sizeof(*constraints->valid_sw_formats));
155  if (!constraints->valid_sw_formats)
156  return AVERROR(ENOMEM);
157 
158  for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
159  UINT format_support = 0;
160  hr = ID3D11Device_CheckFormatSupport(device_hwctx->device, supported_formats[i].d3d_format, &format_support);
161  if (SUCCEEDED(hr) && (format_support & D3D11_FORMAT_SUPPORT_TEXTURE2D))
162  constraints->valid_sw_formats[nb_sw_formats++] = supported_formats[i].pix_fmt;
163  }
164  constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;
165 
166  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
167  if (!constraints->valid_hw_formats)
168  return AVERROR(ENOMEM);
169 
170  constraints->valid_hw_formats[0] = AV_PIX_FMT_D3D11;
171  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
172 
173  return 0;
174 }
175 
176 static void free_texture(void *opaque, uint8_t *data)
177 {
178  ID3D11Texture2D_Release((ID3D11Texture2D *)opaque);
179  av_free(data);
180 }
181 
182 static AVBufferRef *wrap_texture_buf(AVHWFramesContext *ctx, ID3D11Texture2D *tex, int index)
183 {
184  AVBufferRef *buf;
185  AVD3D11FrameDescriptor *desc = av_mallocz(sizeof(*desc));
186  D3D11VAFramesContext *s = ctx->hwctx;
187  AVD3D11VAFramesContext *frames_hwctx = &s->p;
188  if (!desc) {
189  ID3D11Texture2D_Release(tex);
190  return NULL;
191  }
192 
193  if (s->nb_surfaces <= s->nb_surfaces_used) {
194  frames_hwctx->texture_infos = av_realloc_f(frames_hwctx->texture_infos,
195  s->nb_surfaces_used + 1,
196  sizeof(*frames_hwctx->texture_infos));
197  if (!frames_hwctx->texture_infos) {
198  ID3D11Texture2D_Release(tex);
199  av_free(desc);
200  return NULL;
201  }
202  s->nb_surfaces = s->nb_surfaces_used + 1;
203  }
204 
205  frames_hwctx->texture_infos[s->nb_surfaces_used].texture = tex;
206  frames_hwctx->texture_infos[s->nb_surfaces_used].index = index;
207  s->nb_surfaces_used++;
208 
209  desc->texture = tex;
210  desc->index = index;
211 
212  buf = av_buffer_create((uint8_t *)desc, sizeof(*desc), free_texture, tex, 0);
213  if (!buf) {
214  ID3D11Texture2D_Release(tex);
215  av_free(desc);
216  return NULL;
217  }
218 
219  return buf;
220 }
221 
222 static AVBufferRef *d3d11va_alloc_single(AVHWFramesContext *ctx)
223 {
224  D3D11VAFramesContext *s = ctx->hwctx;
225  AVD3D11VAFramesContext *hwctx = &s->p;
226  AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
227  HRESULT hr;
228  ID3D11Texture2D *tex;
229  D3D11_TEXTURE2D_DESC texDesc = {
230  .Width = ctx->width,
231  .Height = ctx->height,
232  .MipLevels = 1,
233  .Format = s->format,
234  .SampleDesc = { .Count = 1 },
235  .ArraySize = 1,
236  .Usage = D3D11_USAGE_DEFAULT,
237  .BindFlags = hwctx->BindFlags,
238  .MiscFlags = hwctx->MiscFlags,
239  };
240 
241  hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &tex);
242  if (FAILED(hr)) {
243  av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
244  return NULL;
245  }
246 
247  return wrap_texture_buf(ctx, tex, 0);
248 }
249 
250 static AVBufferRef *d3d11va_pool_alloc(void *opaque, size_t size)
251 {
252  AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
253  D3D11VAFramesContext *s = ctx->hwctx;
254  AVD3D11VAFramesContext *hwctx = &s->p;
255  D3D11_TEXTURE2D_DESC texDesc;
256 
257  if (!hwctx->texture)
258  return d3d11va_alloc_single(ctx);
259 
260  ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc);
261 
262  if (s->nb_surfaces_used >= texDesc.ArraySize) {
263  av_log(ctx, AV_LOG_ERROR, "Static surface pool size exceeded.\n");
264  return NULL;
265  }
266 
267  ID3D11Texture2D_AddRef(hwctx->texture);
268  return wrap_texture_buf(ctx, hwctx->texture, s->nb_surfaces_used);
269 }
270 
271 static int d3d11va_frames_init(AVHWFramesContext *ctx)
272 {
273  AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
274  D3D11VAFramesContext *s = ctx->hwctx;
275  AVD3D11VAFramesContext *hwctx = &s->p;
276 
277  int i;
278  HRESULT hr;
279  D3D11_TEXTURE2D_DESC texDesc;
280 
281  for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
282  if (ctx->sw_format == supported_formats[i].pix_fmt) {
283  s->format = supported_formats[i].d3d_format;
284  break;
285  }
286  }
287  if (i == FF_ARRAY_ELEMS(supported_formats)) {
288  av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format: %s\n",
289  av_get_pix_fmt_name(ctx->sw_format));
290  return AVERROR(EINVAL);
291  }
292 
293  hwctx->BindFlags |= device_hwctx->BindFlags;
294  hwctx->MiscFlags |= device_hwctx->MiscFlags;
295 
296  ctx->initial_pool_size = FFMIN(ctx->initial_pool_size, MAX_ARRAY_SIZE);
297 
298  texDesc = (D3D11_TEXTURE2D_DESC){
299  .Width = ctx->width,
300  .Height = ctx->height,
301  .MipLevels = 1,
302  .Format = s->format,
303  .SampleDesc = { .Count = 1 },
304  .ArraySize = ctx->initial_pool_size,
305  .Usage = D3D11_USAGE_DEFAULT,
306  .BindFlags = hwctx->BindFlags,
307  .MiscFlags = hwctx->MiscFlags,
308  };
309 
310  if (hwctx->texture) {
311  D3D11_TEXTURE2D_DESC texDesc2;
312  ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc2);
313 
314  if (texDesc.Width != texDesc2.Width ||
315  texDesc.Height != texDesc2.Height ||
316  texDesc.Format != texDesc2.Format) {
317  av_log(ctx, AV_LOG_ERROR, "User-provided texture has mismatching parameters\n");
318  return AVERROR(EINVAL);
319  }
320 
321  ctx->initial_pool_size = texDesc2.ArraySize;
322  hwctx->BindFlags = texDesc2.BindFlags;
323  hwctx->MiscFlags = texDesc2.MiscFlags;
324  } else if (texDesc.ArraySize > 0) {
325  hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &hwctx->texture);
326  if (FAILED(hr)) {
327  av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
328  return AVERROR_UNKNOWN;
329  }
330  }
331 
332  hwctx->texture_infos = av_realloc_f(NULL, ctx->initial_pool_size, sizeof(*hwctx->texture_infos));
333  if (!hwctx->texture_infos)
334  return AVERROR(ENOMEM);
335  s->nb_surfaces = ctx->initial_pool_size;
336 
336 
337  ffhwframesctx(ctx)->pool_internal =
338  av_buffer_pool_init2(ctx->initial_pool_size * sizeof(AVD3D11FrameDescriptor),
339  ctx, d3d11va_pool_alloc, NULL);
340  if (!ffhwframesctx(ctx)->pool_internal)
341  return AVERROR(ENOMEM);
342 
343  return 0;
344 }
345 
346 static int d3d11va_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
347 {
348  AVD3D11FrameDescriptor *desc;
349 
350  frame->buf[0] = av_buffer_pool_get(ctx->pool);
351  if (!frame->buf[0])
352  return AVERROR(ENOMEM);
353 
354  desc = (AVD3D11FrameDescriptor *)frame->buf[0]->data;
355 
356  frame->data[0] = (uint8_t *)desc->texture;
357  frame->data[1] = (uint8_t *)desc->index;
358  frame->format = AV_PIX_FMT_D3D11;
359  frame->width = ctx->width;
360  frame->height = ctx->height;
361 
362  return 0;
363 }
364 
365 static int d3d11va_transfer_get_formats(AVHWFramesContext *ctx,
366  enum AVHWFrameTransferDirection dir,
367  enum AVPixelFormat **formats)
368 {
369  D3D11VAFramesContext *s = ctx->hwctx;
370  enum AVPixelFormat *fmts;
371 
372  fmts = av_malloc_array(2, sizeof(*fmts));
373  if (!fmts)
374  return AVERROR(ENOMEM);
375 
376  fmts[0] = ctx->sw_format;
377  fmts[1] = AV_PIX_FMT_NONE;
378 
379  // Don't signal support for opaque formats. Actual access would fail.
380  if (s->format == DXGI_FORMAT_420_OPAQUE)
381  fmts[0] = AV_PIX_FMT_NONE;
382 
383  *formats = fmts;
384 
385  return 0;
386 }
387 
388 static int d3d11va_create_staging_texture(AVHWFramesContext *ctx, DXGI_FORMAT format)
389 {
390  AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
391  D3D11VAFramesContext *s = ctx->hwctx;
392  HRESULT hr;
393  D3D11_TEXTURE2D_DESC texDesc = {
394  .Width = ctx->width,
395  .Height = ctx->height,
396  .MipLevels = 1,
397  .Format = format,
398  .SampleDesc = { .Count = 1 },
399  .ArraySize = 1,
400  .Usage = D3D11_USAGE_STAGING,
401  .CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE,
402  };
403 
404  hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &s->staging_texture);
405  if (FAILED(hr)) {
406  av_log(ctx, AV_LOG_ERROR, "Could not create the staging texture (%lx)\n", (long)hr);
407  return AVERROR_UNKNOWN;
408  }
409 
410  return 0;
411 }
412 
413 static void fill_texture_ptrs(uint8_t *data[4], int linesize[4],
414  AVHWFramesContext *ctx,
415  D3D11_TEXTURE2D_DESC *desc,
416  D3D11_MAPPED_SUBRESOURCE *map)
417 {
418  int i;
419 
420  for (i = 0; i < 4; i++)
421  linesize[i] = map->RowPitch;
422 
423  av_image_fill_pointers(data, ctx->sw_format, desc->Height,
424  (uint8_t*)map->pData, linesize);
425 }
426 
427 static int d3d11va_transfer_data(AVHWFramesContext *ctx, AVFrame *dst,
428  const AVFrame *src)
429 {
430  AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
431  D3D11VAFramesContext *s = ctx->hwctx;
432  int download = src->format == AV_PIX_FMT_D3D11;
433  const AVFrame *frame = download ? src : dst;
434  const AVFrame *other = download ? dst : src;
435  // (The interface types are compatible.)
436  ID3D11Resource *texture = (ID3D11Resource *)(ID3D11Texture2D *)frame->data[0];
437  int index = (intptr_t)frame->data[1];
438  ID3D11Resource *staging;
439  int w = FFMIN(dst->width, src->width);
440  int h = FFMIN(dst->height, src->height);
441  uint8_t *map_data[4];
442  int map_linesize[4];
443  D3D11_TEXTURE2D_DESC desc;
444  D3D11_MAPPED_SUBRESOURCE map;
445  HRESULT hr;
446  int res;
447 
448  if (frame->hw_frames_ctx->data != (uint8_t *)ctx || other->format != ctx->sw_format)
449  return AVERROR(EINVAL);
450 
451  device_hwctx->lock(device_hwctx->lock_ctx);
452 
453  if (!s->staging_texture) {
454  ID3D11Texture2D_GetDesc((ID3D11Texture2D *)texture, &desc);
455  res = d3d11va_create_staging_texture(ctx, desc.Format);
456  if (res < 0)
457  return res;
458  }
459 
460  staging = (ID3D11Resource *)s->staging_texture;
461 
462  ID3D11Texture2D_GetDesc(s->staging_texture, &desc);
463 
464  if (download) {
465  ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
466  staging, 0, 0, 0, 0,
467  texture, index, NULL);
468 
469  hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
470  staging, 0, D3D11_MAP_READ, 0, &map);
471  if (FAILED(hr))
472  goto map_failed;
473 
474  fill_texture_ptrs(map_data, map_linesize, ctx, &desc, &map);
475 
476  av_image_copy2(dst->data, dst->linesize, map_data, map_linesize,
477  ctx->sw_format, w, h);
478 
479  ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);
480  } else {
481  hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
482  staging, 0, D3D11_MAP_WRITE, 0, &map);
483  if (FAILED(hr))
484  goto map_failed;
485 
486  fill_texture_ptrs(map_data, map_linesize, ctx, &desc, &map);
487 
488  av_image_copy2(map_data, map_linesize, src->data, src->linesize,
489  ctx->sw_format, w, h);
490 
491  ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);
492 
493  ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
494  texture, index, 0, 0, 0,
495  staging, 0, NULL);
496  }
497 
498  device_hwctx->unlock(device_hwctx->lock_ctx);
499  return 0;
500 
501 map_failed:
502  av_log(ctx, AV_LOG_ERROR, "Unable to lock D3D11VA surface (%lx)\n", (long)hr);
503  device_hwctx->unlock(device_hwctx->lock_ctx);
504  return AVERROR_UNKNOWN;
505 }
506 
507 static int d3d11va_device_init(AVHWDeviceContext *hwdev)
508 {
509  AVD3D11VADeviceContext *device_hwctx = hwdev->hwctx;
510  HRESULT hr;
511 
512  if (!device_hwctx->lock) {
513  device_hwctx->lock_ctx = CreateMutex(NULL, 0, NULL);
514  if (device_hwctx->lock_ctx == INVALID_HANDLE_VALUE) {
515  av_log(NULL, AV_LOG_ERROR, "Failed to create a mutex\n");
516  return AVERROR(EINVAL);
517  }
518  device_hwctx->lock = d3d11va_default_lock;
519  device_hwctx->unlock = d3d11va_default_unlock;
520  }
521 
522  if (!device_hwctx->device_context) {
523  ID3D11Device_GetImmediateContext(device_hwctx->device, &device_hwctx->device_context);
524  if (!device_hwctx->device_context)
525  return AVERROR_UNKNOWN;
526  }
527 
528  if (!device_hwctx->video_device) {
529  hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device, &IID_ID3D11VideoDevice,
530  (void **)&device_hwctx->video_device);
531  if (FAILED(hr))
532  return AVERROR_UNKNOWN;
533  }
534 
535  if (!device_hwctx->video_context) {
536  hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device_context, &IID_ID3D11VideoContext,
537  (void **)&device_hwctx->video_context);
538  if (FAILED(hr))
539  return AVERROR_UNKNOWN;
540  }
541 
542  return 0;
543 }
544 
545 static void d3d11va_device_uninit(AVHWDeviceContext *hwdev)
546 {
547  AVD3D11VADeviceContext *device_hwctx = hwdev->hwctx;
548 
549  if (device_hwctx->device) {
550  ID3D11Device_Release(device_hwctx->device);
551  device_hwctx->device = NULL;
552  }
553 
554  if (device_hwctx->device_context) {
555  ID3D11DeviceContext_Release(device_hwctx->device_context);
556  device_hwctx->device_context = NULL;
557  }
558 
559  if (device_hwctx->video_device) {
560  ID3D11VideoDevice_Release(device_hwctx->video_device);
561  device_hwctx->video_device = NULL;
562  }
563 
564  if (device_hwctx->video_context) {
565  ID3D11VideoContext_Release(device_hwctx->video_context);
566  device_hwctx->video_context = NULL;
567  }
568 
569  if (device_hwctx->lock == d3d11va_default_lock) {
570  CloseHandle(device_hwctx->lock_ctx);
571  device_hwctx->lock_ctx = INVALID_HANDLE_VALUE;
572  device_hwctx->lock = NULL;
573  }
574 }
575 
576 static int d3d11va_device_find_adapter_by_vendor_id(AVHWDeviceContext *ctx, uint32_t flags, const char *vendor_id)
577 {
578  HRESULT hr;
579  IDXGIAdapter *adapter = NULL;
580  IDXGIFactory2 *factory;
581  int adapter_id = 0;
582  long int id = strtol(vendor_id, NULL, 0);
583 
584  hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&factory);
585  if (FAILED(hr)) {
586  av_log(ctx, AV_LOG_ERROR, "CreateDXGIFactory returned error\n");
587  return -1;
588  }
589 
590  while (IDXGIFactory2_EnumAdapters(factory, adapter_id++, &adapter) != DXGI_ERROR_NOT_FOUND) {
591  ID3D11Device* device = NULL;
592  DXGI_ADAPTER_DESC adapter_desc;
593 
594  hr = mD3D11CreateDevice(adapter, D3D_DRIVER_TYPE_UNKNOWN, NULL, flags, NULL, 0, D3D11_SDK_VERSION, &device, NULL, NULL);
595  if (FAILED(hr)) {
596  av_log(ctx, AV_LOG_DEBUG, "D3D11CreateDevice returned error, try next adapter\n");
597  IDXGIAdapter_Release(adapter);
598  continue;
599  }
600 
601  hr = IDXGIAdapter2_GetDesc(adapter, &adapter_desc);
602  ID3D11Device_Release(device);
603  IDXGIAdapter_Release(adapter);
604  if (FAILED(hr)) {
605  av_log(ctx, AV_LOG_DEBUG, "IDXGIAdapter2_GetDesc returned error, try next adapter\n");
606  continue;
607  } else if (adapter_desc.VendorId == id) {
608  IDXGIFactory2_Release(factory);
609  return adapter_id - 1;
610  }
611  }
612 
613  IDXGIFactory2_Release(factory);
614  return -1;
615 }
616 
617 static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
618  AVDictionary *opts, int flags)
619 {
620  AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;
621 
622  HRESULT hr;
623  IDXGIAdapter *pAdapter = NULL;
624  ID3D10Multithread *pMultithread;
625  UINT creationFlags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;
626  int is_debug = !!av_dict_get(opts, "debug", NULL, 0);
627  int ret;
628  int adapter = -1;
629 
630  if (is_debug) {
631  creationFlags |= D3D11_CREATE_DEVICE_DEBUG;
632  av_log(ctx, AV_LOG_INFO, "Enabling d3d11 debugging.\n");
633  }
634 
635  if ((ret = ff_thread_once(&functions_loaded, load_functions)) != 0)
636  return AVERROR_UNKNOWN;
637  if (!mD3D11CreateDevice || !mCreateDXGIFactory) {
638  av_log(ctx, AV_LOG_ERROR, "Failed to load D3D11 library or its functions\n");
639  return AVERROR_UNKNOWN;
640  }
641 
642  if (device) {
643  adapter = atoi(device);
644  } else {
645  AVDictionaryEntry *e = av_dict_get(opts, "vendor_id", NULL, 0);
646  if (e && e->value) {
647  adapter = d3d11va_device_find_adapter_by_vendor_id(ctx, creationFlags, e->value);
648  if (adapter < 0) {
649  av_log(ctx, AV_LOG_ERROR, "Failed to find d3d11va adapter by "
650  "vendor id %s\n", e->value);
651  return AVERROR_UNKNOWN;
652  }
653  }
654  }
655 
656  if (adapter >= 0) {
657  IDXGIFactory2 *pDXGIFactory;
658 
659  av_log(ctx, AV_LOG_VERBOSE, "Selecting d3d11va adapter %d\n", adapter);
660  hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&pDXGIFactory);
661  if (SUCCEEDED(hr)) {
662  if (FAILED(IDXGIFactory2_EnumAdapters(pDXGIFactory, adapter, &pAdapter)))
663  pAdapter = NULL;
664  IDXGIFactory2_Release(pDXGIFactory);
665  }
666  }
667 
668  if (pAdapter) {
669  DXGI_ADAPTER_DESC desc;
670  hr = IDXGIAdapter2_GetDesc(pAdapter, &desc);
671  if (!FAILED(hr)) {
672  av_log(ctx, AV_LOG_INFO, "Using device %04x:%04x (%ls).\n",
673  desc.VendorId, desc.DeviceId, desc.Description);
674  }
675  }
676 
677  hr = mD3D11CreateDevice(pAdapter, pAdapter ? D3D_DRIVER_TYPE_UNKNOWN : D3D_DRIVER_TYPE_HARDWARE, NULL, creationFlags, NULL, 0,
678  D3D11_SDK_VERSION, &device_hwctx->device, NULL, NULL);
679  if (pAdapter)
680  IDXGIAdapter_Release(pAdapter);
681  if (FAILED(hr)) {
682  av_log(ctx, AV_LOG_ERROR, "Failed to create Direct3D device (%lx)\n", (long)hr);
683  return AVERROR_UNKNOWN;
684  }
685 
686  hr = ID3D11Device_QueryInterface(device_hwctx->device, &IID_ID3D10Multithread, (void **)&pMultithread);
687  if (SUCCEEDED(hr)) {
688  ID3D10Multithread_SetMultithreadProtected(pMultithread, TRUE);
689  ID3D10Multithread_Release(pMultithread);
690  }
691 
692 #if !HAVE_UWP && HAVE_DXGIDEBUG_H
693  if (is_debug) {
694  HANDLE dxgidebug_dll = LoadLibrary("dxgidebug.dll");
695  if (dxgidebug_dll) {
696  HRESULT (WINAPI * pf_DXGIGetDebugInterface)(const GUID *riid, void **ppDebug)
697  = (void *)GetProcAddress(dxgidebug_dll, "DXGIGetDebugInterface");
698  if (pf_DXGIGetDebugInterface) {
699  IDXGIDebug *dxgi_debug = NULL;
700  hr = pf_DXGIGetDebugInterface(&IID_IDXGIDebug, (void**)&dxgi_debug);
701  if (SUCCEEDED(hr) && dxgi_debug) {
702  IDXGIDebug_ReportLiveObjects(dxgi_debug, DXGI_DEBUG_ALL, DXGI_DEBUG_RLO_ALL);
703  av_log(ctx, AV_LOG_INFO, "Enabled dxgi debugging.\n");
704  } else {
705  av_log(ctx, AV_LOG_WARNING, "Failed enabling dxgi debugging.\n");
706  }
707  } else {
708  av_log(ctx, AV_LOG_WARNING, "Failed getting dxgi debug interface.\n");
709  }
710  } else {
711  av_log(ctx, AV_LOG_WARNING, "Failed loading dxgi debug library.\n");
712  }
713  }
714 #endif
715 
716  if (av_dict_get(opts, "SHADER", NULL, 0))
717  device_hwctx->BindFlags |= D3D11_BIND_SHADER_RESOURCE;
718 
719  if (av_dict_get(opts, "UAV", NULL, 0))
720  device_hwctx->BindFlags |= D3D11_BIND_UNORDERED_ACCESS;
721 
722  if (av_dict_get(opts, "RTV", NULL, 0))
723  device_hwctx->BindFlags |= D3D11_BIND_RENDER_TARGET;
724 
725  if (av_dict_get(opts, "SHARED", NULL, 0))
726  device_hwctx->MiscFlags |= D3D11_RESOURCE_MISC_SHARED;
727 
728  return 0;
729 }
730 
731 const HWContextType ff_hwcontext_type_d3d11va = {
732  .type = AV_HWDEVICE_TYPE_D3D11VA,
733  .name = "D3D11VA",
734 
735  .device_hwctx_size = sizeof(AVD3D11VADeviceContext),
736  .frames_hwctx_size = sizeof(D3D11VAFramesContext),
737 
738  .device_create = d3d11va_device_create,
739  .device_init = d3d11va_device_init,
740  .device_uninit = d3d11va_device_uninit,
741  .frames_get_constraints = d3d11va_frames_get_constraints,
742  .frames_init = d3d11va_frames_init,
743  .frames_uninit = d3d11va_frames_uninit,
744  .frames_get_buffer = d3d11va_get_buffer,
745  .transfer_get_formats = d3d11va_transfer_get_formats,
746  .transfer_data_to = d3d11va_transfer_data,
747  .transfer_data_from = d3d11va_transfer_data,
748 
749  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_D3D11, AV_PIX_FMT_NONE },
750 };
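Usage note (not part of the source above): the functions in this file are only reached through libavutil's public hwcontext API. The following sketch shows, under that assumption, how an application might create a D3D11VA device context, configure a small NV12 frame pool, and upload a software frame. The helper name upload_example and its sw_frame parameter are hypothetical, error handling is abbreviated, and this is an illustrative sketch rather than code from this file.

/* Minimal sketch (hypothetical helper, not part of hwcontext_d3d11va.c). */
#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>
#include <libavutil/hwcontext_d3d11va.h>

static int upload_example(AVFrame *sw_frame) /* sw_frame assumed to be AV_PIX_FMT_NV12 */
{
    AVBufferRef *device_ref = NULL, *frames_ref = NULL;
    AVFrame *hw_frame = av_frame_alloc();
    int ret;

    if (!hw_frame)
        return AVERROR(ENOMEM);

    /* Creates the ID3D11Device and runs d3d11va_device_init() above. */
    ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_D3D11VA,
                                 NULL /* default adapter */, NULL, 0);
    if (ret < 0)
        goto done;

    frames_ref = av_hwframe_ctx_alloc(device_ref);
    if (!frames_ref) {
        ret = AVERROR(ENOMEM);
        goto done;
    }

    {
        AVHWFramesContext *frames = (AVHWFramesContext *)frames_ref->data;
        frames->format            = AV_PIX_FMT_D3D11;
        frames->sw_format         = AV_PIX_FMT_NV12;
        frames->width             = sw_frame->width;
        frames->height            = sw_frame->height;
        frames->initial_pool_size = 4; /* allocated as one array texture by d3d11va_frames_init() */
    }

    ret = av_hwframe_ctx_init(frames_ref);                /* -> d3d11va_frames_init() */
    if (ret < 0)
        goto done;

    ret = av_hwframe_get_buffer(frames_ref, hw_frame, 0); /* -> d3d11va_get_buffer() */
    if (ret < 0)
        goto done;

    /* Copies through the staging texture via d3d11va_transfer_data(). On success,
     * hw_frame->data[0] holds the ID3D11Texture2D* and hw_frame->data[1] the array index. */
    ret = av_hwframe_transfer_data(hw_frame, sw_frame, 0);

done:
    av_frame_free(&hw_frame);
    av_buffer_unref(&frames_ref);
    av_buffer_unref(&device_ref);
    return ret;
}

The device-creation options handled by d3d11va_device_create() above ("debug", "vendor_id", "SHADER", "UAV", "RTV", "SHARED") would be passed through the AVDictionary argument of av_hwdevice_ctx_create().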