   24 #include <vdpau/vdpau.h> 
   43 #ifdef VDP_YCBCR_FORMAT_P016 
   59 #ifdef VDP_YCBCR_FORMAT_Y_U_V_444 
   62 #ifdef VDP_YCBCR_FORMAT_P016 
   76 #ifdef VDP_YCBCR_FORMAT_P016 
  136             if (err == VDP_STATUS_OK && supported)
 
  147 #define GET_CALLBACK(id, result)                                                \ 
  150     err = hwctx->get_proc_address(hwctx->device, id, &tmp);                     \ 
  151     if (err != VDP_STATUS_OK) {                                                 \ 
  152         av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n");     \ 
  153         return AVERROR_UNKNOWN;                                                 \ 
  165     GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES, priv->get_transfer_caps);
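For context, a minimal standalone sketch of the pattern GET_CALLBACK wraps: resolving one VDPAU entry point through the device's VdpGetProcAddress. The helper name resolve_surface_create is illustrative and not part of this file; the device/get_proc_address pair is assumed to come from vdp_device_create_x11().

#include <vdpau/vdpau.h>

/* Illustrative helper: fetch VdpVideoSurfaceCreate from a device, the same
 * way GET_CALLBACK(id, result) does for each callback it resolves. */
static VdpVideoSurfaceCreate *resolve_surface_create(VdpDevice device,
                                                     VdpGetProcAddress *get_proc_address)
{
    void *tmp = NULL;
    VdpStatus err = get_proc_address(device, VDP_FUNC_ID_VIDEO_SURFACE_CREATE, &tmp);
    if (err != VDP_STATUS_OK)
        return NULL;                 /* the macro returns AVERROR_UNKNOWN instead */
    return (VdpVideoSurfaceCreate *)tmp;
}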
 
  191                                         const void *hwconfig,
 
  195     int nb_sw_formats = 0;
 
  223     VdpVideoSurface            surf = (VdpVideoSurface)(uintptr_t)data;
 
  236     VdpVideoSurface surf;
 
  240                                    ctx->width, ctx->height, &surf);
 
  241     if (err != VDP_STATUS_OK) {
 
  281         if (!ctx->internal->pool_internal)
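A hedged sketch of what the pool allocator around these lines does: create a VdpVideoSurface through the resolved surf_create callback and wrap the handle in an AVBufferRef whose free callback destroys it again. The example_* names are assumptions for illustration; the real allocator stores its context pointer, not the destroy callback, as the opaque value.

#include <stdint.h>
#include <vdpau/vdpau.h>
#include <libavutil/buffer.h>

/* Illustrative free callback: the surface handle travels as the buffer's
 * data pointer, the destroy callback as the opaque pointer. */
static void example_surface_free(void *opaque, uint8_t *data)
{
    VdpVideoSurfaceDestroy *surf_destroy = opaque;
    surf_destroy((VdpVideoSurface)(uintptr_t)data);
}

static AVBufferRef *example_surface_wrap(VdpDevice device,
                                         VdpVideoSurfaceCreate *surf_create,
                                         VdpVideoSurfaceDestroy *surf_destroy,
                                         VdpChromaType chroma_type,
                                         uint32_t width, uint32_t height)
{
    VdpVideoSurface surf;
    if (surf_create(device, chroma_type, width, height, &surf) != VDP_STATUS_OK)
        return NULL;
    /* The surface handle itself is smuggled through the data pointer. */
    return av_buffer_create((uint8_t *)(uintptr_t)surf, sizeof(surf),
                            example_surface_free, surf_destroy,
                            AV_BUFFER_FLAG_READONLY);
}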
 
  315                "No target formats are supported for this chroma type\n");
 
  333     VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)src->data[3];
 
  336     uint32_t linesize[3];
 
  339     VdpYCbCrFormat vdpau_format;
 
  347                    "The linesize %d cannot be represented as uint32\n",
 
  363                "Unsupported target pixel format: %s\n",
 
  368     if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
 
  369 #ifdef VDP_YCBCR_FORMAT_Y_U_V_444
 
  370             || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
 
  372 #ifdef VDP_YCBCR_FORMAT_P016
 
  373             || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444_16)
 
  378     err = priv->get_data(surf, vdpau_format, data, linesize);
 
  379     if (err != VDP_STATUS_OK) {
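The caller-side counterpart of this download path is av_hwframe_transfer_data(); a minimal sketch, where hw_frame carries AV_PIX_FMT_VDPAU data and download_vdpau_frame is an illustrative helper name.

#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>

/* Download a VDPAU surface into system memory. Leaving sw_frame->format as
 * AV_PIX_FMT_NONE lets av_hwframe_transfer_data() pick the first format
 * reported by vdpau_transfer_get_formats(); the copy itself lands in
 * vdpau_transfer_data_from() above. */
static int download_vdpau_frame(const AVFrame *hw_frame, AVFrame *sw_frame)
{
    return av_hwframe_transfer_data(sw_frame, hw_frame, 0);
}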
 
  391     VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)dst->data[3];
 
  394     uint32_t linesize[3];
 
  397     VdpYCbCrFormat vdpau_format;
 
  403         if (src->linesize[i] < 0 || src->linesize[i] > UINT32_MAX) {
 
  405                    "The linesize %d cannot be represented as uint32\n",
 
  409         linesize[i] = src->linesize[i];
 
  421                "Unsupported source pixel format: %s\n",
 
  426     if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
 
  427 #ifdef VDP_YCBCR_FORMAT_Y_U_V_444
 
  428             || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
 
  433     err = priv->put_data(surf, vdpau_format, data, linesize);
 
  434     if (err != VDP_STATUS_OK) {
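For the opposite direction, a hedged sketch of uploading a software frame (e.g. AV_PIX_FMT_YUV420P, which maps to VDP_YCBCR_FORMAT_YV12 above) into a surface taken from the frames context; upload_vdpau_frame is an illustrative helper name.

#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>

/* Allocate a VDPAU surface from the pool and copy sw_frame into it; the
 * copy ends up in vdpau_transfer_data_to() above. */
static int upload_vdpau_frame(AVBufferRef *hw_frames_ref,
                              const AVFrame *sw_frame, AVFrame *hw_frame)
{
    int ret = av_hwframe_get_buffer(hw_frames_ref, hw_frame, 0);
    if (ret < 0)
        return ret;
    return av_hwframe_transfer_data(hw_frame, sw_frame, 0);
}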
 
  443 #include <vdpau/vdpau_x11.h> 
  444 #include <X11/Xlib.h> 
  446 typedef struct VDPAUDevicePriv {
 
  447     VdpDeviceDestroy *device_destroy;
 
  454     VDPAUDevicePriv       *priv = ctx->user_opaque;
 
  456     if (priv->device_destroy)
 
  457         priv->device_destroy(hwctx->device);
 
  459         XCloseDisplay(priv->dpy);
 
  468     VDPAUDevicePriv *priv;
 
  470     VdpGetInformationString *get_information_string;
 
  471     const char *display, *vendor;
 
  477     ctx->user_opaque = priv;
 
  478     ctx->free        = vdpau_device_free;
 
  480     priv->dpy = XOpenDisplay(device);
 
  483                XDisplayName(device));
 
  486     display = XDisplayString(priv->dpy);
 
  488     err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
 
  490     if (err != VDP_STATUS_OK) {
 
  496     GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
 
  497     GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY,         priv->device_destroy);
 
  499     get_information_string(&vendor);
 
  501            "X11 display %s\n", vendor, display);
 
  516     .device_create        = vdpau_device_create,
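From the public API, this device_create implementation is reached through av_hwdevice_ctx_create() with AV_HWDEVICE_TYPE_VDPAU; a minimal sketch, where the device string is an X11 display name (NULL means $DISPLAY) and open_vdpau_device is an illustrative helper name.

#include <libavutil/buffer.h>
#include <libavutil/hwcontext.h>

static AVBufferRef *open_vdpau_device(const char *x11_display_name)
{
    AVBufferRef *device_ref = NULL;
    if (av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VDPAU,
                               x11_display_name, NULL, 0) < 0)
        return NULL;
    return device_ref;   /* release with av_buffer_unref() when done */
}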
 
  
AVPixelFormat
Pixel format.
 
In libavfilter, the word "frame" indicates either a video frame or a group of audio samples, as stored in an AVFrame structure. For each input and each output, filters list the supported formats: for video that means pixel format, for audio that means channel layout and sample format. The lists are references to shared objects: when the negotiation mechanism computes the intersection of the formats supported at each end of a link, all references to both lists are replaced with a reference to the intersection, and when a single format is eventually chosen for a link amongst the remaining ones, all references to the list are updated. That means that if a filter requires that its input and output have the same format amongst a supported list, all it has to do is use a reference to the same list of formats. query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism to try again later; that can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another.
 
uint8_t * data
The data buffer.
 
static int vdpau_frames_init(AVHWFramesContext *ctx)
 
enum AVPixelFormat pix_fmt
 
This structure describes decoded (raw) audio or video data.
 
enum AVPixelFormat * pix_fmts
 
#define AV_PIX_FMT_YUV420P10
 
VdpGetProcAddress * get_proc_address
 
enum AVPixelFormat * pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)]
 
This struct is allocated as AVHWDeviceContext.hwctx.
 
#define AV_LOG_VERBOSE
Detailed information.
 
VdpChromaType chroma_type
 
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
 
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
 
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
 
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
 
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
 
static void vdpau_buffer_free(void *opaque, uint8_t *data)
 
static const VDPAUPixFmtMap pix_fmts_422[]
 
VdpVideoSurfaceGetBitsYCbCr * get_data
 
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
 
VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities * get_transfer_caps
 
static int count_pixfmts(const VDPAUPixFmtMap *map)
 
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
 
#define AV_PIX_FMT_YUV444P10
 
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
 
#define FF_ARRAY_ELEMS(a)
 
static int device_init(AVFormatContext *ctx, int *width, int *height, uint32_t pixelformat)
 
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
 
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
 
#define AV_PIX_FMT_YUV444P16
 
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
 
static enum AVPixelFormat pix_fmt
 
static enum AVPixelFormat pix_fmts[]
 
static AVBufferRef * vdpau_pool_alloc(void *opaque, size_t size)
 
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
 
enum AVPixelFormat frames_sw_format
 
static const VDPAUPixFmtMap pix_fmts_420[]
 
static int vdpau_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
 
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
 
const HWContextType ff_hwcontext_type_vdpau
 
#define AV_PIX_FMT_YUV422P10
 
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
 
VdpVideoSurfaceDestroy * surf_destroy
 
static void vdpau_device_uninit(AVHWDeviceContext *ctx)
 
#define AV_PIX_FMT_YUV444P12
 
int format
format of the frame, -1 if unknown or unset; values correspond to enum AVPixelFormat for video frames,...
 
@ AV_PIX_FMT_NV16
interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
 
VdpVideoSurfacePutBitsYCbCr * put_data
 
VdpChromaType chroma_type
 
@ AV_PIX_FMT_VDPAU
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
 
#define i(width, name, range_min, range_max)
 
#define av_malloc_array(a, b)
 
static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
 
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
 
static const struct @350 vdpau_pix_fmts[]
 
static int vdpau_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
 
AVHWFrameTransferDirection
 
This struct describes a set or pool of "hardware" frames (i.e.
 
static const VDPAUPixFmtMap pix_fmts_444[]
 
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
 
#define FFSWAP(type, a, b)
 
#define AV_PIX_FMT_YUV420P12
 
@ AV_PIX_FMT_UYVY422
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
 
static int vdpau_init_pixmfts(AVHWDeviceContext *ctx)
 
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
 
static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
 
VdpVideoSurfaceGetBitsYCbCr * get_data
 
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
 
A reference to a data buffer.
 
static int vdpau_device_init(AVHWDeviceContext *ctx)
 
const VDPAUPixFmtMap * map
 
int nb_pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)]
 
static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
 
VdpVideoSurfaceCreate * surf_create
 
#define flags(name, subs,...)
 
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
 
VdpVideoSurfacePutBitsYCbCr * put_data
 
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
 
#define GET_CALLBACK(id, result)
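Tying the entries above together, a hedged end-to-end sketch of setting up a VDPAU frames context and allocating one surface through the callbacks this file implements. The helper name and the 1280x720 dimensions are placeholders, not taken from this file.

#include <libavutil/buffer.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>

static AVFrame *alloc_one_vdpau_surface(AVBufferRef *device_ref)
{
    AVBufferRef       *frames_ref = av_hwframe_ctx_alloc(device_ref);
    AVFrame           *frame      = av_frame_alloc();
    AVHWFramesContext *fc;

    if (!frames_ref || !frame)
        goto fail;

    fc            = (AVHWFramesContext *)frames_ref->data;
    fc->format    = AV_PIX_FMT_VDPAU;    /* hw format: data[3] is a VdpVideoSurface */
    fc->sw_format = AV_PIX_FMT_YUV420P;  /* picks the 4:2:0 chroma type / YV12 transfers */
    fc->width     = 1280;                /* placeholder dimensions */
    fc->height    = 720;

    if (av_hwframe_ctx_init(frames_ref) < 0 ||        /* runs vdpau_frames_init() */
        av_hwframe_get_buffer(frames_ref, frame, 0) < 0)  /* runs vdpau_get_buffer() */
        goto fail;

    av_buffer_unref(&frames_ref);        /* frame keeps its own reference */
    return frame;

fail:
    av_frame_free(&frame);
    av_buffer_unref(&frames_ref);
    return NULL;
}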