Go to the documentation of this file.
21 #include <mfx/mfxvideo.h>
22 #include <mfx/mfxplugin.h>
23 #include <mfx/mfxjpeg.h>
39 #if QSV_VERSION_ATLEAST(1, 12)
40 #include "mfx/mfxvp8.h"
48 #if QSV_VERSION_ATLEAST(1, 8)
50 return MFX_CODEC_HEVC;
54 return MFX_CODEC_MPEG2;
57 #if QSV_VERSION_ATLEAST(1, 12)
62 return MFX_CODEC_JPEG;
63 #if QSV_VERSION_ATLEAST(1, 19)
67 #if QSV_VERSION_ATLEAST(1, 34)
82 return MFX_LEVEL_UNKNOWN;
96 {MFX_IOPATTERN_IN_VIDEO_MEMORY,
"input is video memory surface" },
97 {MFX_IOPATTERN_IN_SYSTEM_MEMORY,
"input is system memory surface" },
98 {MFX_IOPATTERN_IN_OPAQUE_MEMORY,
"input is opaque memory surface" },
99 {MFX_IOPATTERN_OUT_VIDEO_MEMORY,
"output is video memory surface" },
100 {MFX_IOPATTERN_OUT_SYSTEM_MEMORY,
"output is system memory surface" },
101 {MFX_IOPATTERN_OUT_OPAQUE_MEMORY,
"output is opaque memory surface" },
105 const char *extra_string)
115 desc =
"unknown iopattern";
121 static const struct {
126 { MFX_ERR_NONE, 0,
"success" },
128 { MFX_ERR_NULL_PTR,
AVERROR(EINVAL),
"NULL pointer" },
129 { MFX_ERR_UNSUPPORTED,
AVERROR(ENOSYS),
"unsupported" },
130 { MFX_ERR_MEMORY_ALLOC,
AVERROR(ENOMEM),
"failed to allocate memory" },
131 { MFX_ERR_NOT_ENOUGH_BUFFER,
AVERROR(ENOMEM),
"insufficient input/output buffer" },
132 { MFX_ERR_INVALID_HANDLE,
AVERROR(EINVAL),
"invalid handle" },
133 { MFX_ERR_LOCK_MEMORY,
AVERROR(EIO),
"failed to lock the memory block" },
134 { MFX_ERR_NOT_INITIALIZED,
AVERROR_BUG,
"not initialized" },
135 { MFX_ERR_NOT_FOUND,
AVERROR(ENOSYS),
"specified object was not found" },
139 { MFX_ERR_MORE_SURFACE,
AVERROR_UNKNOWN,
"expect more surface at output" },
140 { MFX_ERR_MORE_BITSTREAM,
AVERROR_UNKNOWN,
"expect more bitstream at output" },
142 { MFX_ERR_DEVICE_LOST,
AVERROR(EIO),
"device lost" },
143 { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM,
AVERROR(EINVAL),
"incompatible video parameters" },
144 { MFX_ERR_INVALID_VIDEO_PARAM,
AVERROR(EINVAL),
"invalid video parameters" },
145 { MFX_ERR_UNDEFINED_BEHAVIOR,
AVERROR_BUG,
"undefined behavior" },
146 { MFX_ERR_DEVICE_FAILED,
AVERROR(EIO),
"device failed" },
147 { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM,
AVERROR(EINVAL),
"incompatible audio parameters" },
148 { MFX_ERR_INVALID_AUDIO_PARAM,
AVERROR(EINVAL),
"invalid audio parameters" },
150 { MFX_WRN_IN_EXECUTION, 0,
"operation in execution" },
151 { MFX_WRN_DEVICE_BUSY, 0,
"device busy" },
152 { MFX_WRN_VIDEO_PARAM_CHANGED, 0,
"video parameters changed" },
153 { MFX_WRN_PARTIAL_ACCELERATION, 0,
"partial acceleration" },
154 { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0,
"incompatible video parameters" },
155 { MFX_WRN_VALUE_NOT_CHANGED, 0,
"value is saturated" },
156 { MFX_WRN_OUT_OF_RANGE, 0,
"value out of range" },
157 { MFX_WRN_FILTER_SKIPPED, 0,
"filter skipped" },
158 { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0,
"incompatible audio parameters" },
172 *
desc =
"unknown error";
177 const char *error_string)
187 const char *warning_string)
204 #if QSV_VERSION_ATLEAST(1, 27)
218 *
fourcc = MFX_FOURCC_NV12;
222 *
fourcc = MFX_FOURCC_P010;
227 *
fourcc = MFX_FOURCC_YUY2;
229 #if QSV_VERSION_ATLEAST(1, 27)
232 *
fourcc = MFX_FOURCC_Y210;
244 for (
i = 0;
i <
ctx->nb_mids;
i++) {
255 switch (mfx_pic_struct & 0xF) {
256 case MFX_PICSTRUCT_PROGRESSIVE:
259 case MFX_PICSTRUCT_FIELD_TFF:
262 case MFX_PICSTRUCT_FIELD_BFF:
273 switch (mfx_pic_type & 0x7) {
274 case MFX_FRAMETYPE_I:
275 if (mfx_pic_type & MFX_FRAMETYPE_S)
280 case MFX_FRAMETYPE_B:
283 case MFX_FRAMETYPE_P:
284 if (mfx_pic_type & MFX_FRAMETYPE_S)
289 case MFX_FRAMETYPE_UNKNOWN:
302 if (!load_plugins || !*load_plugins)
305 while (*load_plugins) {
313 if (strlen(plugin) != 2 *
sizeof(
uid.Data)) {
316 goto load_plugin_fail;
319 for (
i = 0;
i <
sizeof(
uid.Data);
i++) {
320 err = sscanf(plugin + 2 *
i,
"%2hhx",
uid.Data +
i);
324 goto load_plugin_fail;
329 ret = MFXVideoUSER_Load(session, &
uid, 1);
332 snprintf(errorbuf,
sizeof(errorbuf),
333 "Could not load the requested plugin '%s'", plugin);
335 goto load_plugin_fail;
353 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
360 av_dict_set(&child_device_opts,
"kernel_driver",
"i915", 0);
361 av_dict_set(&child_device_opts,
"driver",
"iHD", 0);
370 hwctx = qs->va_device_ctx->hwctx;
373 (mfxHandleType)MFX_HANDLE_VA_DISPLAY, (mfxHDL)hwctx->
display);
381 #endif //AVCODEC_QSV_LINUX_SESSION_HANDLE
384 const char *load_plugins,
int gpu_copy)
386 mfxIMPL impl = MFX_IMPL_AUTO_ANY;
388 mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
393 #if QSV_VERSION_ATLEAST(1, 16)
394 init_par.GPUCopy = gpu_copy;
396 init_par.Implementation = impl;
397 init_par.Version = ver;
401 "Error initializing an internal MFX session");
403 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
404 ret = ff_qsv_set_display_handle(avctx, qs);
415 MFXQueryIMPL(qs->
session, &impl);
417 switch (MFX_IMPL_BASETYPE(impl)) {
418 case MFX_IMPL_SOFTWARE:
421 case MFX_IMPL_HARDWARE:
422 case MFX_IMPL_HARDWARE2:
423 case MFX_IMPL_HARDWARE3:
424 case MFX_IMPL_HARDWARE4:
425 desc =
"hardware accelerated";
432 "Initialized an internal MFX session using %s implementation\n",
449 int nb_surfaces = frames_hwctx->nb_surfaces;
473 for (
i = 0;
i < nb_surfaces;
i++) {
475 mid->
handle = frames_hwctx->surfaces[
i].Data.MemId;
488 int nb_surfaces = frames_hwctx->nb_surfaces;
498 for (
i = 0;
i < nb_surfaces;
i++)
499 resp->mids[
i] = &mids[
i];
500 resp->NumFrameActual = nb_surfaces;
502 resp->mids[resp->NumFrameActual] = (mfxMemId)
av_buffer_ref(hw_frames_ref);
503 if (!resp->mids[resp->NumFrameActual]) {
508 resp->mids[resp->NumFrameActual + 1] =
av_buffer_ref(mids_buf);
509 if (!resp->mids[resp->NumFrameActual + 1]) {
519 mfxFrameAllocResponse *resp)
526 if (!(req->Type & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET |
527 MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)) ||
528 !(req->Type & (MFX_MEMTYPE_FROM_DECODE | MFX_MEMTYPE_FROM_ENCODE)))
529 return MFX_ERR_UNSUPPORTED;
531 if (req->Type & MFX_MEMTYPE_EXTERNAL_FRAME) {
535 mfxFrameInfo *
i = &req->Info;
536 mfxFrameInfo *i1 = &frames_hwctx->surfaces[0].Info;
538 if (
i->Width > i1->Width ||
i->Height > i1->Height ||
539 i->FourCC != i1->FourCC ||
i->ChromaFormat != i1->ChromaFormat) {
541 "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
542 i->Width,
i->Height,
i->FourCC,
i->ChromaFormat,
543 i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
544 return MFX_ERR_UNSUPPORTED;
550 "Error filling an external frame allocation request\n");
551 return MFX_ERR_MEMORY_ALLOC;
553 }
else if (req->Type & MFX_MEMTYPE_INTERNAL_FRAME) {
556 mfxFrameInfo *
i = &req->Info;
564 return MFX_ERR_MEMORY_ALLOC;
567 frames_hwctx = frames_ctx->hwctx;
571 frames_ctx->width =
i->Width;
572 frames_ctx->height =
i->Height;
573 frames_ctx->initial_pool_size = req->NumFrameSuggested;
575 frames_hwctx->frame_type = req->Type;
580 "Error initializing a frames context for an internal frame "
581 "allocation request\n");
583 return MFX_ERR_MEMORY_ALLOC;
589 return MFX_ERR_MEMORY_ALLOC;
597 "Error filling an internal frame allocation request\n");
598 return MFX_ERR_MEMORY_ALLOC;
601 return MFX_ERR_UNSUPPORTED;
623 return MFX_ERR_UNDEFINED_BEHAVIOR;
628 return MFX_ERR_MEMORY_ALLOC;
651 qsv_mid->
surf.Info = hw_frames_hwctx->surfaces[0].Info;
669 return MFX_ERR_MEMORY_ALLOC;
694 MFX_HANDLE_VA_DISPLAY,
695 MFX_HANDLE_D3D9_DEVICE_MANAGER,
696 MFX_HANDLE_D3D11_DEVICE,
700 mfxSession parent_session = device_hwctx->session;
701 mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
702 mfxHDL handle =
NULL;
712 err = MFXQueryIMPL(parent_session, &impl);
713 if (err == MFX_ERR_NONE)
714 err = MFXQueryVersion(parent_session, &ver);
715 if (err != MFX_ERR_NONE)
717 "Error querying the session attributes");
720 err = MFXVideoCORE_GetHandle(parent_session,
handle_types[
i], &handle);
721 if (err == MFX_ERR_NONE) {
729 "from the session\n");
732 #if QSV_VERSION_ATLEAST(1, 16)
733 init_par.GPUCopy = gpu_copy;
735 init_par.Implementation = impl;
736 init_par.Version = ver;
737 err = MFXInitEx(init_par, &session);
738 if (err != MFX_ERR_NONE)
740 "Error initializing a child MFX session");
743 err = MFXVideoCORE_SetHandle(session,
handle_type, handle);
744 if (err != MFX_ERR_NONE)
746 "Error setting a HW handle");
750 err = MFXJoinSession(parent_session, session);
751 if (err != MFX_ERR_NONE)
753 "Error joining session");
768 const char *load_plugins,
int opaque,
int gpu_copy)
770 mfxFrameAllocator frame_allocator = {
771 .pthis = qsv_frames_ctx,
788 frames_ctx->
device_ref, load_plugins, gpu_copy);
793 qsv_frames_ctx->
logctx = avctx;
801 qsv_frames_ctx->
nb_mids = frames_hwctx->nb_surfaces;
803 err = MFXVideoCORE_SetFrameAllocator(session, &frame_allocator);
804 if (err != MFX_ERR_NONE)
806 "Error setting a frame allocator");
819 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
#define AV_LOG_WARNING
Something somehow does not look correct.
AVPixelFormat
Pixel format.
static const mfxHandleType handle_types[]
AVBufferRef * av_buffer_alloc(buffer_size_t size)
Allocate an AVBuffer of the given size using av_malloc().
Filter: the word “frame” indicates either a video frame or a group of audio samples, as stored in an AVFrame structure. Format: for each input and each output, the list of supported formats. For video that means pixel format; for audio that means channel layout, sample format and sample rate. The lists are not just lists — they are references to shared objects. When the negotiation mechanism computes the intersection of the formats supported at each end of a link, all references to both lists are replaced with a reference to the intersection. And when a single format is eventually chosen for a link amongst the remaining ones, all references to the list are updated. That means that if a filter requires that its input and output have the same format amongst a supported list, all it has to do is use a reference to the same list of formats. query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism to try again later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references: ownership and permissions
#define QSV_VERSION_MAJOR
AVBufferRef * hw_frames_ctx
char * av_get_token(const char **buf, const char *term)
Unescape the given string until a non escaped terminating char, and return the token corresponding to...
uint8_t * data
The data buffer.
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
int ff_qsv_close_internal_session(QSVSession *qs)
enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type)
This struct is allocated as AVHWDeviceContext.hwctx.
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
Map a hardware frame.
int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame)
#define AV_PIX_FMT_YUV420P10
void * av_mallocz_array(size_t nmemb, size_t size)
#define AV_LOG_VERBOSE
Detailed information.
VADisplay display
The VADisplay handle, to be filled by the user.
AVBufferRef * av_buffer_create(uint8_t *data, buffer_size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
int width
The allocated dimensions of the frames in this pool.
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession, AVBufferRef *device_ref, const char *load_plugins, int gpu_copy)
static int qsv_load_plugins(mfxSession session, const char *load_plugins, void *logctx)
It is the only field you need to keep, assuming you have a context. There is some magic you don't need to care about around this; just leave it as a vf type.
mfxHandleType handle_type
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
int ff_qsv_print_warning(void *log_ctx, mfxStatus err, const char *warning_string)
#define FF_ARRAY_ELEMS(a)
#define QSV_VERSION_MINOR
Filter: the word “frame” indicates either a video frame or a group of audio samples, as stored in an AVFrame structure. Format: for each input and each output, the list of supported formats. For video that means pixel format; for audio that means channel layout and sample format (the sample packing is implied by the sample format) and sample rate. The lists are not just lists
int ff_qsv_level_to_mfx(enum AVCodecID codec_id, int level)
#define QSV_RUNTIME_VERSION_ATLEAST(MFX_VERSION, MAJOR, MINOR)
#define av_assert0(cond)
assert() equivalent, that is always enabled.
It is the only field you need to keep, assuming you have a context. There is some magic you don't need to care about around this field.
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
static void mids_buf_free(void *opaque, uint8_t *data)
int ff_qsv_init_session_frames(AVCodecContext *avctx, mfxSession *psession, QSVFramesContext *qsv_frames_ctx, const char *load_plugins, int opaque, int gpu_copy)
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
@ AV_PICTURE_TYPE_SI
Switching Intra.
@ AV_PICTURE_TYPE_I
Intra.
int ff_qsv_print_iopattern(void *log_ctx, int mfx_iopattern, const char *extra_string)
static const struct @114 qsv_iopatterns[]
#define AV_PIX_FMT_YUV422P10
@ AV_PIX_FMT_QSV
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
@ AV_PICTURE_TYPE_SP
Switching Predicted.
AVCodecID
Identify the syntax and semantics of the bitstream.
int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc)
AVBufferRef * hw_frames_ref
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
@ AV_PICTURE_TYPE_NONE
Undefined.
void av_dict_free(AVDictionary **pm)
Free all the memory allocated for an AVDictionary struct and all keys and values.
enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct)
static const struct @115 qsv_errors[]
This struct describes a set or pool of "hardware" frames (i.e.
static mfxStatus qsv_frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
These buffered frames must be flushed immediately if a new input produces new output. In that case, the filter must not call request_frame to get more input; it must just process the frame or queue it. The task of requesting more frames is left to the filter's request_frame method or to the application. If a filter has several inputs, the filter must be ready for frames arriving randomly on any input; any filter with several inputs will most likely require some kind of queuing mechanism. It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced. request_frame: for filters that do not use the activate callback, this method is called when a frame is wanted on an output. For a source, it should directly call filter_frame on the corresponding output. For a filter, if there are queued frames already, one of these frames should be pushed; if the filter should request a frame on one of its inputs, it does so repeatedly until at least one frame has been pushed. Return values should indicate success, or at least progress towards producing a frame.
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id)
static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
main external API structure.
static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
@ AV_HWFRAME_MAP_DIRECT
The mapping must be direct.
enum AVPixelFormat ff_qsv_map_fourcc(uint32_t fourcc)
AVBufferRef * av_buffer_ref(AVBufferRef *buf)
Create a new reference to an AVBuffer.
This struct is allocated as AVHWFramesContext.hwctx.
@ AV_PICTURE_TYPE_P
Predicted.
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
A reference to a data buffer.
static int qsv_setup_mids(mfxFrameAllocResponse *resp, AVBufferRef *hw_frames_ref, AVBufferRef *mids_buf)
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
static AVBufferRef * qsv_create_mids(AVBufferRef *hw_frames_ref)
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
VAAPI connection details.
@ AV_CODEC_ID_MPEG2VIDEO
preferred ID for MPEG-1/2 video decoding
int ff_qsv_print_error(void *log_ctx, mfxStatus err, const char *error_string)
int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs, const char *load_plugins, int gpu_copy)
static mfxStatus qsv_frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
int ff_qsv_map_error(mfxStatus mfx_err, const char **desc)
Convert a libmfx error code into an ffmpeg error code.