40 #define NS(n) ((n) < 0 ? (int)((n)*65536.0-0.5+DBL_EPSILON) : (int)((n)*65536.0+0.5))
41 #define CB(n) av_clip_uint8(n)
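/* NS() quantizes a floating-point matrix coefficient to 16.16 fixed point with
 * round-to-nearest (the DBL_EPSILON nudge keeps negative values from rounding
 * the wrong way under truncation), and CB() clips a converted result back into
 * the 0..255 range of an 8-bit sample. As a worked example,
 * NS(0.2126) == (int)(0.2126 * 65536.0 + 0.5) == 13933. */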
44 { +0.7152, +0.0722, +0.2126 },
45 { +0.5900, +0.1100, +0.3000 },
46 { +0.5870, +0.1140, +0.2990 },
47 { +0.7010, +0.0870, +0.2120 },
48 { +0.6780, +0.0593, +0.2627 },
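/* These rows are the luma weights {Kg, Kb, Kr} of yuv_coeff_luma[5][3], one
 * row per supported matrix, in the same order as color_modes below:
 * bt709, fcc, bt601, smpte240m, bt2020 (non-constant luminance). */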
63 int yuv_convert[25][3][3];
81 #define OFFSET(x) offsetof(ColorMatrixContext, x)
82 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
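/* The usual libavfilter AVOption idiom: OFFSET() locates an option's storage
 * inside the private ColorMatrixContext, and FLAGS marks the options as
 * video filtering parameters. */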
138 for (i = 0; i < 3; i++)
139 for (j = 0; j < 3; j++)
140 cm[i][j] = yuv[i][0] * rgb[0][j] + yuv[i][1] * rgb[1][j] + yuv[i][2] * rgb[2][j];
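/* solve_coefficients() is a plain 3x3 matrix product, cm = yuv * rgb: applied
 * to a column vector, the composed matrix runs rgb first and yuv second, i.e.
 * it chains a YUV->RGB transform with an RGB->YUV transform. */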
146 double yuv_coeff[5][3][3];
147 double rgb_coeffd[5][3][3];
148 double yuv_convertd[25][3][3];
149 double bscale, rscale;
152 for (i = 0; i < 5; i++) {
156 bscale = 0.5 / (yuv_coeff[i][0][1] - 1.0);
157 rscale = 0.5 / (yuv_coeff[i][0][2] - 1.0);
158 yuv_coeff[i][1][0] = bscale * yuv_coeff[i][0][0];
159 yuv_coeff[i][1][1] = 0.5;
160 yuv_coeff[i][1][2] = bscale * yuv_coeff[i][0][2];
161 yuv_coeff[i][2][0] = rscale * yuv_coeff[i][0][0];
162 yuv_coeff[i][2][1] = rscale * yuv_coeff[i][0][1];
163 yuv_coeff[i][2][2] = 0.5;
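/* Row 0 of yuv_coeff[i] carries the {Kg, Kb, Kr} luma weights from
 * yuv_coeff_luma; the two chroma rows are derived from it. With
 * bscale = 0.5 / (Kb - 1) and rscale = 0.5 / (Kr - 1), row 1 works out to
 * Cb = 0.5 * (B - Y) / (1 - Kb) and row 2 to Cr = 0.5 * (R - Y) / (1 - Kr),
 * the standard colour-difference channels on a +/-0.5 scale. */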
165 for (i = 0; i < 5; i++)
167 for (i = 0; i < 5; i++) {
168 for (j = 0; j < 5; j++) {
170 for (k = 0; k < 3; k++) {
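/* For each of the 5 matrices, inverse3x3() yields the matching YUV->RGB
 * transform; the double loop then composes every (source, destination) pair
 * with solve_coefficients() and quantizes each resulting 3x3 matrix via NS()
 * into the 25-entry integer table yuv_convert, indexed as source * 5 + dest
 * (matching the mode computation in filter_frame further down). */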
184 static const char * const color_modes[] = {"bt709", "fcc", "bt601", "smpte240m", "bt2020"};
196 av_log(ctx, AV_LOG_ERROR, "Source and destination color space must not be identical\n");
214 const int slice_start = (height * jobnr ) / nb_jobs;
215 const int slice_end = (height * (jobnr+1)) / nb_jobs;
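/* jobnr/nb_jobs come from libavfilter's slice threading: each worker job
 * converts its own band of rows, [slice_start, slice_end), so the whole frame
 * is covered once across all jobs. */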
218 const int c2 = td->c2;
219 const int c3 = td->c3;
220 const int c4 = td->c4;
221 const int c5 = td->c5;
222 const int c6 = td->c6;
223 const int c7 = td->c7;
226 for (y = slice_start; y < slice_end; y++) {
227 for (x = 0; x < width; x += 4) {
228 const int u = srcp[x + 0] - 128;
229 const int v = srcp[x + 2] - 128;
230 const int uvval = c2 * u + c3 * v + 1081344;
231 dstp[x + 0] = CB((c4 * u + c5 * v + 8421376) >> 16);
232 dstp[x + 1] = CB((65536 * (srcp[x + 1] - 16) + uvval) >> 16);
233 dstp[x + 2] = CB((c6 * u + c7 * v + 8421376) >> 16);
234 dstp[x + 3] = CB((65536 * (srcp[x + 3] - 16) + uvval) >> 16);
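/* UYVY is packed as Cb Y0 Cr Y1, so each 4-byte group shares one Cb/Cr pair
 * between two luma samples. The magic constants are 16.16 fixed point:
 * 1081344 == (16 + 0.5) * 65536 re-adds the luma offset removed by the "- 16"
 * plus 0.5 for rounding, and 8421376 == (128 + 0.5) * 65536 does the same for
 * the 128-centred chroma channels before the final >> 16. */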
250 const int slice_start = (height * jobnr ) / nb_jobs;
251 const int slice_end = (height * (jobnr+1)) / nb_jobs;
252 const int src_pitchY = src->linesize[0];
253 const int src_pitchUV = src->linesize[1];
254 const unsigned char *srcpU = src->data[1] + slice_start * src_pitchUV;
255 const unsigned char *srcpV = src->data[2] + slice_start * src_pitchUV;
256 const unsigned char *srcpY = src->data[0] + slice_start * src_pitchY;
257 const int dst_pitchY = dst->linesize[0];
258 const int dst_pitchUV = dst->linesize[1];
259 unsigned char *dstpU = dst->data[1] + slice_start * dst_pitchUV;
260 unsigned char *dstpV = dst->data[2] + slice_start * dst_pitchUV;
261 unsigned char *dstpY = dst->data[0] + slice_start * dst_pitchY;
262 const int c2 = td->c2;
263 const int c3 = td->c3;
264 const int c4 = td->c4;
265 const int c5 = td->c5;
266 const int c6 = td->c6;
267 const int c7 = td->c7;
270 for (y = slice_start; y < slice_end; y++) {
271 for (x = 0; x < width; x++) {
272 const int u = srcpU[x] - 128;
273 const int v = srcpV[x] - 128;
274 const int uvval = c2 * u + c3 * v + 1081344;
275 dstpY[x] = CB((65536 * (srcpY[x] - 16) + uvval) >> 16);
276 dstpU[x] = CB((c4 * u + c5 * v + 8421376) >> 16);
277 dstpV[x] = CB((c6 * u + c7 * v + 8421376) >> 16);
281 srcpU += src_pitchUV;
282 srcpV += src_pitchUV;
283 dstpU += dst_pitchUV;
284 dstpV += dst_pitchUV;
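/* In 4:4:4 every pixel has its own chroma sample, so the conversion runs once
 * per pixel and each plane advances by one line per row of the slice. */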
297 const int slice_start = (height * jobnr ) / nb_jobs;
298 const int slice_end = (height * (jobnr+1)) / nb_jobs;
299 const int src_pitchY = src->linesize[0];
300 const int src_pitchUV = src->linesize[1];
301 const unsigned char *srcpU = src->data[1] + slice_start * src_pitchUV;
302 const unsigned char *srcpV = src->data[2] + slice_start * src_pitchUV;
303 const unsigned char *srcpY = src->data[0] + slice_start * src_pitchY;
304 const int dst_pitchY = dst->linesize[0];
305 const int dst_pitchUV = dst->linesize[1];
306 unsigned char *dstpU = dst->data[1] + slice_start * dst_pitchUV;
307 unsigned char *dstpV = dst->data[2] + slice_start * dst_pitchUV;
308 unsigned char *dstpY = dst->data[0] + slice_start * dst_pitchY;
309 const int c2 = td->c2;
310 const int c3 = td->c3;
311 const int c4 = td->c4;
312 const int c5 = td->c5;
313 const int c6 = td->c6;
314 const int c7 = td->c7;
317 for (y = slice_start; y < slice_end; y++) {
318 for (x = 0; x < width; x += 2) {
319 const int u = srcpU[x >> 1] - 128;
320 const int v = srcpV[x >> 1] - 128;
321 const int uvval = c2 * u + c3 * v + 1081344;
322 dstpY[x + 0] = CB((65536 * (srcpY[x + 0] - 16) + uvval) >> 16);
323 dstpY[x + 1] = CB((65536 * (srcpY[x + 1] - 16) + uvval) >> 16);
324 dstpU[x >> 1] = CB((c4 * u + c5 * v + 8421376) >> 16);
325 dstpV[x >> 1] = CB((c6 * u + c7 * v + 8421376) >> 16);
329 srcpU += src_pitchUV;
330 srcpV += src_pitchUV;
331 dstpU += dst_pitchUV;
332 dstpV += dst_pitchUV;
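/* In 4:2:2 one Cb/Cr pair covers two horizontally adjacent luma samples,
 * hence the x += 2 step and the x >> 1 chroma indexing; the chroma planes
 * still advance every row because there is no vertical subsampling. */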
345 const int slice_start = ((height * jobnr ) / nb_jobs) << 1;
346 const int slice_end = ((height * (jobnr+1)) / nb_jobs) << 1;
347 const int src_pitchY = src->linesize[0];
348 const int src_pitchUV = src->linesize[1];
349 const int dst_pitchY = dst->linesize[0];
350 const int dst_pitchUV = dst->linesize[1];
351 const unsigned char *srcpY = src->data[0] + src_pitchY * slice_start;
352 const unsigned char *srcpU = src->data[1] + src_pitchUV * (slice_start >> 1);
353 const unsigned char *srcpV = src->data[2] + src_pitchUV * (slice_start >> 1);
354 const unsigned char *srcpN = src->data[0] + src_pitchY * (slice_start + 1);
355 unsigned char *dstpU = dst->data[1] + dst_pitchUV * (slice_start >> 1);
356 unsigned char *dstpV = dst->data[2] + dst_pitchUV * (slice_start >> 1);
357 unsigned char *dstpY = dst->data[0] + dst_pitchY * slice_start;
358 unsigned char *dstpN = dst->data[0] + dst_pitchY * (slice_start + 1);
359 const int c2 = td->c2;
360 const int c3 = td->c3;
361 const int c4 = td->c4;
362 const int c5 = td->c5;
363 const int c6 = td->c6;
364 const int c7 = td->c7;
367 for (y = slice_start; y < slice_end; y += 2) {
368 for (x = 0; x < width; x += 2) {
369 const int u = srcpU[x >> 1] - 128;
370 const int v = srcpV[x >> 1] - 128;
371 const int uvval = c2 * u + c3 * v + 1081344;
372 dstpY[x + 0] = CB((65536 * (srcpY[x + 0] - 16) + uvval) >> 16);
373 dstpY[x + 1] = CB((65536 * (srcpY[x + 1] - 16) + uvval) >> 16);
374 dstpN[x + 0] = CB((65536 * (srcpN[x + 0] - 16) + uvval) >> 16);
375 dstpN[x + 1] = CB((65536 * (srcpN[x + 1] - 16) + uvval) >> 16);
376 dstpU[x >> 1] = CB((c4 * u + c5 * v + 8421376) >> 16);
377 dstpV[x >> 1] = CB((c6 * u + c7 * v + 8421376) >> 16);
379 srcpY += src_pitchY << 1;
380 dstpY += dst_pitchY << 1;
381 srcpN += src_pitchY << 1;
382 dstpN += dst_pitchY << 1;
383 srcpU += src_pitchUV;
384 srcpV += src_pitchUV;
385 dstpU += dst_pitchUV;
386 dstpV += dst_pitchUV;
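/* 4:2:0 subsamples chroma in both directions: each Cb/Cr pair covers a 2x2
 * luma block, so the loop handles two luma rows at a time (srcpY plus the
 * "next" row srcpN), steps x by 2, and advances the chroma planes only once
 * per pair of luma rows. */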
450 av_log(ctx, AV_LOG_ERROR, "Input frame does not specify a supported colorspace, and none has been specified as source either\n");
454 color->mode = source * 5 + color->dest;
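/* The selected colorspace pair picks one of the 25 precomputed matrices,
 * yuv_convert[source * 5 + dest], which is where the c2..c7 coefficients used
 * by the process_slice_* workers come from. */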
458 switch(color->dest) {
511 .name = "colormatrix",
516 .inputs = colormatrix_inputs,
517 .outputs = colormatrix_outputs,
518 .priv_class = &colormatrix_class,
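/* Typical invocation, e.g. converting BT.601 material for a BT.709 output
 * (in.mkv/out.mkv are placeholder filenames):
 *
 *   ffmpeg -i in.mkv -vf colormatrix=bt601:bt709 out.mkv
 *
 * As the error message above suggests, the source matrix may be omitted when
 * the input frames already carry a supported colorspace tag, in which case it
 * is taken from the frame itself. */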