ffmpeg / libavfilter / vf_pad.c @ b9f9e59a
/*
 * copyright (c) 2008 vmrsss
 * copyright (c) 2009 Stefano Sabatini
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * video padding filter and color source
 */

#include "avfilter.h"
#include "parseutils.h"
#include "libavutil/pixdesc.h"
#include "libavutil/colorspace.h"
#include "libavcore/imgutils.h"
#include "libavcore/parseutils.h"

enum { RED = 0, GREEN, BLUE, ALPHA };

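/**
 * Fill one line of padding color per plane.
 *
 * For the packed RGB formats handled below, a single line is allocated and
 * pre-filled with the remapped RGBA color; for planar YUV formats the RGBA
 * color is converted to YUVA and one byte-filled line is allocated per plane.
 * is_packed_rgba is set accordingly so callers know which layout was produced.
 */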
static int fill_line_with_color(uint8_t *line[4], int line_step[4], int w, uint8_t color[4],
                                enum PixelFormat pix_fmt, uint8_t rgba_color[4], int *is_packed_rgba)
{
    uint8_t rgba_map[4] = {0};
    int i;
    const AVPixFmtDescriptor *pix_desc = &av_pix_fmt_descriptors[pix_fmt];
    int hsub = pix_desc->log2_chroma_w;

    *is_packed_rgba = 1;
    switch (pix_fmt) {
    case PIX_FMT_ARGB:  rgba_map[ALPHA] = 0; rgba_map[RED  ] = 1; rgba_map[GREEN] = 2; rgba_map[BLUE ] = 3; break;
    case PIX_FMT_ABGR:  rgba_map[ALPHA] = 0; rgba_map[BLUE ] = 1; rgba_map[GREEN] = 2; rgba_map[RED  ] = 3; break;
    case PIX_FMT_RGBA:
    case PIX_FMT_RGB24: rgba_map[RED  ] = 0; rgba_map[GREEN] = 1; rgba_map[BLUE ] = 2; rgba_map[ALPHA] = 3; break;
    case PIX_FMT_BGRA:
    case PIX_FMT_BGR24: rgba_map[BLUE ] = 0; rgba_map[GREEN] = 1; rgba_map[RED  ] = 2; rgba_map[ALPHA] = 3; break;
    default:
        *is_packed_rgba = 0;
    }

    if (*is_packed_rgba) {
        line_step[0] = (av_get_bits_per_pixel(pix_desc)) >> 3;
        for (i = 0; i < 4; i++)
            color[rgba_map[i]] = rgba_color[i];

        line[0] = av_malloc(w * line_step[0]);
        for (i = 0; i < w; i++)
            memcpy(line[0] + i * line_step[0], color, line_step[0]);
    } else {
        int plane;

        color[RED  ] = RGB_TO_Y_CCIR(rgba_color[0], rgba_color[1], rgba_color[2]);
        color[GREEN] = RGB_TO_U_CCIR(rgba_color[0], rgba_color[1], rgba_color[2], 0);
        color[BLUE ] = RGB_TO_V_CCIR(rgba_color[0], rgba_color[1], rgba_color[2], 0);
        color[ALPHA] = rgba_color[3];

        for (plane = 0; plane < 4; plane++) {
            int line_size;
            int hsub1 = (plane == 1 || plane == 2) ? hsub : 0;

            line_step[plane] = 1;
            line_size = (w >> hsub1) * line_step[plane];
            line[plane] = av_malloc(line_size);
            memset(line[plane], color[plane], line_size);
        }
    }

    return 0;
}

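/**
 * Fill the rectangle of outpic starting at (x, y) with size w x h using the
 * pre-filled color lines, one memcpy per row in every available plane,
 * applying the hsub/vsub chroma shifts.
 */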
static void draw_rectangle(AVFilterBufferRef *outpic, uint8_t *line[4], int line_step[4],
                           int hsub, int vsub, int x, int y, int w, int h)
{
    int i, plane;
    uint8_t *p;

    for (plane = 0; plane < 4 && outpic->data[plane]; plane++) {
        int hsub1 = plane == 1 || plane == 2 ? hsub : 0;
        int vsub1 = plane == 1 || plane == 2 ? vsub : 0;

        p = outpic->data[plane] + (y >> vsub1) * outpic->linesize[plane];
        for (i = 0; i < (h >> vsub1); i++) {
            memcpy(p + (x >> hsub1) * line_step[plane], line[plane], (w >> hsub1) * line_step[plane]);
            p += outpic->linesize[plane];
        }
    }
}

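/** Advertise the packed RGB and planar YUV formats that fill_line_with_color() supports. */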
static int query_formats(AVFilterContext *ctx)
{
    static const enum PixelFormat pix_fmts[] = {
        PIX_FMT_ARGB,     PIX_FMT_RGBA,
        PIX_FMT_ABGR,     PIX_FMT_BGRA,
        PIX_FMT_RGB24,    PIX_FMT_BGR24,

        PIX_FMT_YUV444P,  PIX_FMT_YUV422P,
        PIX_FMT_YUV420P,  PIX_FMT_YUV411P,
        PIX_FMT_YUV410P,  PIX_FMT_YUV440P,
        PIX_FMT_YUVJ444P, PIX_FMT_YUVJ422P,
        PIX_FMT_YUVJ420P, PIX_FMT_YUVJ440P,
        PIX_FMT_YUVA420P,

        PIX_FMT_NONE
    };

    avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
    return 0;
}

#if CONFIG_PAD_FILTER

typedef struct {
    int w, h;               ///< output dimensions, a value of 0 will result in the input size
    int x, y;               ///< offsets of the input area with respect to the padded area
    int in_w, in_h;         ///< width and height of the padded input video, aligned to the chroma values to avoid chroma issues

    uint8_t color[4];       ///< color expressed either in YUVA or RGBA colorspace for the padding area
    uint8_t *line[4];       ///< one pre-filled line of padding color per plane
    int line_step[4];       ///< bytes per pixel in each pre-filled line
    int hsub, vsub;         ///< chroma subsampling values
} PadContext;

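/**
 * Parse the "width:height[:x:y[:color]]" argument string; missing x and y
 * default to 0 and the color defaults to black.
 */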
static av_cold int init(AVFilterContext *ctx, const char *args, void *opaque)
{
    PadContext *pad = ctx->priv;
    char color_string[128] = "black";

    if (args)
        sscanf(args, "%d:%d:%d:%d:%127s", &pad->w, &pad->h, &pad->x, &pad->y, color_string);

    if (av_parse_color(pad->color, color_string, ctx) < 0)
        return AVERROR(EINVAL);

    /* sanity check params */
    if (pad->w < 0 || pad->h < 0) {
        av_log(ctx, AV_LOG_ERROR, "Negative size values are not acceptable.\n");
        return AVERROR(EINVAL);
    }

    return 0;
}

static av_cold void uninit(AVFilterContext *ctx)
{
    PadContext *pad = ctx->priv;
    int i;

    for (i = 0; i < 4; i++) {
        av_freep(&pad->line[i]);
        pad->line_step[i] = 0;
    }
}

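/**
 * Set up the pad parameters for the negotiated input format: round w, h, x, y
 * and the input size down to the chroma alignment, pre-fill the padding color
 * lines, and verify that the input area fits inside the padded area.
 */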
static int config_input(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->dst;
    PadContext *pad = ctx->priv;
    const AVPixFmtDescriptor *pix_desc = &av_pix_fmt_descriptors[inlink->format];
    uint8_t rgba_color[4];
    int is_packed_rgba;

    pad->hsub = pix_desc->log2_chroma_w;
    pad->vsub = pix_desc->log2_chroma_h;

    if (!pad->w)
        pad->w = inlink->w;
    if (!pad->h)
        pad->h = inlink->h;

    pad->w &= ~((1 << pad->hsub) - 1);
    pad->h &= ~((1 << pad->vsub) - 1);
    pad->x &= ~((1 << pad->hsub) - 1);
    pad->y &= ~((1 << pad->vsub) - 1);

    pad->in_w = inlink->w & ~((1 << pad->hsub) - 1);
    pad->in_h = inlink->h & ~((1 << pad->vsub) - 1);

    memcpy(rgba_color, pad->color, sizeof(rgba_color));
    fill_line_with_color(pad->line, pad->line_step, pad->w, pad->color,
                         inlink->format, rgba_color, &is_packed_rgba);

    av_log(ctx, AV_LOG_INFO, "w:%d h:%d x:%d y:%d color:0x%02X%02X%02X%02X[%s]\n",
           pad->w, pad->h, pad->x, pad->y,
           pad->color[0], pad->color[1], pad->color[2], pad->color[3],
           is_packed_rgba ? "rgba" : "yuva");

    if (pad->x <  0 || pad->y <  0                      ||
        pad->w <= 0 || pad->h <= 0                      ||
        (unsigned)pad->x + (unsigned)inlink->w > pad->w ||
        (unsigned)pad->y + (unsigned)inlink->h > pad->h) {
        av_log(ctx, AV_LOG_ERROR,
               "Input area %d:%d:%d:%d not within the padded area 0:0:%d:%d or zero-sized\n",
               pad->x, pad->y, pad->x + inlink->w, pad->y + inlink->h, pad->w, pad->h);
        return AVERROR(EINVAL);
    }

    return 0;
}

static int config_output(AVFilterLink *outlink)
{
    PadContext *pad = outlink->src->priv;

    outlink->w = pad->w;
    outlink->h = pad->h;
    return 0;
}

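/**
 * Hand out buffers already enlarged by the padding amount, with the plane
 * pointers shifted by (x, y), so the previous filter writes its picture
 * directly into its final position inside the padded frame and no extra copy
 * of the image data is needed.
 */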
static AVFilterBufferRef *get_video_buffer(AVFilterLink *inlink, int perms, int w, int h)
{
    PadContext *pad = inlink->dst->priv;

    AVFilterBufferRef *picref = avfilter_get_video_buffer(inlink->dst->outputs[0], perms,
                                                          w + (pad->w - pad->in_w),
                                                          h + (pad->h - pad->in_h));
    int plane;

    for (plane = 0; plane < 4 && picref->data[plane]; plane++) {
        int hsub = (plane == 1 || plane == 2) ? pad->hsub : 0;
        int vsub = (plane == 1 || plane == 2) ? pad->vsub : 0;

        picref->data[plane] += (pad->x >> hsub) * pad->line_step[plane] +
            (pad->y >> vsub) * picref->linesize[plane];
    }

    return picref;
}

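/**
 * Shift the plane pointers of the output reference back to the top-left of
 * the padded buffer (undoing the offset applied in get_video_buffer()) before
 * forwarding the frame downstream.
 */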
static void start_frame(AVFilterLink *inlink, AVFilterBufferRef *inpicref)
{
    PadContext *pad = inlink->dst->priv;
    AVFilterBufferRef *outpicref = avfilter_ref_buffer(inpicref, ~0);
    int plane;

    inlink->dst->outputs[0]->out_buf = outpicref;

    for (plane = 0; plane < 4 && outpicref->data[plane]; plane++) {
        int hsub = (plane == 1 || plane == 2) ? pad->hsub : 0;
        int vsub = (plane == 1 || plane == 2) ? pad->vsub : 0;

        outpicref->data[plane] -= (pad->x >> hsub) * pad->line_step[plane] +
            (pad->y >> vsub) * outpicref->linesize[plane];
    }

    avfilter_start_frame(inlink->dst->outputs[0], outpicref);
}

static void end_frame(AVFilterLink *link)
{
    avfilter_end_frame(link->dst->outputs[0]);
    avfilter_unref_buffer(link->cur_buf);
}

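/**
 * Draw and forward the top or bottom padding bar at the moment the picture
 * slice adjacent to it is being sent: before_slice selects whether we are
 * called before or after that slice, and slice_dir gives the direction in
 * which slices are delivered, so the bars are emitted in the right order.
 */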
static void draw_send_bar_slice(AVFilterLink *link, int y, int h, int slice_dir, int before_slice)
{
    PadContext *pad = link->dst->priv;
    int bar_y, bar_h = 0;

    if (slice_dir * before_slice == 1 && y == pad->y) {
        /* top bar */
        bar_y = 0;
        bar_h = pad->y;
    } else if (slice_dir * before_slice == -1 && (y + h) == (pad->y + pad->in_h)) {
        /* bottom bar */
        bar_y = pad->y + pad->in_h;
        bar_h = pad->h - pad->in_h - pad->y;
    }

    if (bar_h) {
        draw_rectangle(link->dst->outputs[0]->out_buf,
                       pad->line, pad->line_step, pad->hsub, pad->vsub,
                       0, bar_y, pad->w, bar_h);
        avfilter_draw_slice(link->dst->outputs[0], bar_y, bar_h, slice_dir);
    }
}

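/**
 * For each picture slice: offset it vertically by the pad area, align it to
 * the chroma grid, draw the left and right borders, forward the slice, and
 * emit the top/bottom bars around it via draw_send_bar_slice().
 */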
static void draw_slice(AVFilterLink *link, int y, int h, int slice_dir)
{
    PadContext *pad = link->dst->priv;
    AVFilterBufferRef *outpic = link->dst->outputs[0]->out_buf;

    y += pad->y;

    y &= ~((1 << pad->vsub) - 1);
    h &= ~((1 << pad->vsub) - 1);

    if (!h)
        return;
    draw_send_bar_slice(link, y, h, slice_dir, 1);

    /* left border */
    draw_rectangle(outpic, pad->line, pad->line_step, pad->hsub, pad->vsub,
                   0, y, pad->x, h);
    /* right border */
    draw_rectangle(outpic, pad->line, pad->line_step, pad->hsub, pad->vsub,
                   pad->x + pad->in_w, y, pad->w - pad->x - pad->in_w, h);
    avfilter_draw_slice(link->dst->outputs[0], y, h, slice_dir);

    draw_send_bar_slice(link, y, h, slice_dir, -1);
}

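/*
 * Example usage (illustrative): the filter description "pad=640:480:0:40:violet"
 * pads the input to 640x480, places the picture at x=0, y=40 and fills the
 * remaining area with violet.
 */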
AVFilter avfilter_vf_pad = {
    .name          = "pad",
    .description   = NULL_IF_CONFIG_SMALL("Pad input image to width:height[:x:y[:color]] (default x and y: 0, default color: black)."),

    .priv_size     = sizeof(PadContext),
    .init          = init,
    .uninit        = uninit,
    .query_formats = query_formats,

    .inputs    = (AVFilterPad[]) {{ .name             = "default",
                                    .type             = AVMEDIA_TYPE_VIDEO,
                                    .config_props     = config_input,
                                    .get_video_buffer = get_video_buffer,
                                    .start_frame      = start_frame,
                                    .draw_slice       = draw_slice,
                                    .end_frame        = end_frame, },
                                  { .name = NULL}},

    .outputs   = (AVFilterPad[]) {{ .name             = "default",
                                    .type             = AVMEDIA_TYPE_VIDEO,
                                    .config_props     = config_output, },
                                  { .name = NULL}},
};

#endif /* CONFIG_PAD_FILTER */

#if CONFIG_COLOR_FILTER

typedef struct {
    int w, h;               ///< frame dimensions
    uint8_t color[4];       ///< color expressed either in YUVA or RGBA colorspace for the fill area
    AVRational time_base;   ///< inverse of the configured frame rate
    uint8_t *line[4];       ///< one pre-filled line of fill color per plane
    int line_step[4];       ///< bytes per pixel in each pre-filled line
    int hsub, vsub;         ///< chroma subsampling values
    uint64_t pts;           ///< frame counter, used to generate timestamps
} ColorContext;

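/**
 * Parse the "color[:size[:rate]]" argument string; defaults are black,
 * 320x240 and 25 fps.
 */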
static av_cold int color_init(AVFilterContext *ctx, const char *args, void *opaque)
{
    ColorContext *color = ctx->priv;
    char color_string[128] = "black";
    char frame_size  [128] = "320x240";
    char frame_rate  [128] = "25";
    AVRational frame_rate_q;
    int ret;

    if (args)
        sscanf(args, "%127[^:]:%127[^:]:%127s", color_string, frame_size, frame_rate);

    if (av_parse_video_size(&color->w, &color->h, frame_size) < 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid frame size: %s\n", frame_size);
        return AVERROR(EINVAL);
    }

    if (av_parse_video_rate(&frame_rate_q, frame_rate) < 0 ||
        frame_rate_q.den <= 0 || frame_rate_q.num <= 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid frame rate: %s\n", frame_rate);
        return AVERROR(EINVAL);
    }
    color->time_base.num = frame_rate_q.den;
    color->time_base.den = frame_rate_q.num;

    if ((ret = av_parse_color(color->color, color_string, ctx)) < 0)
        return ret;

    return 0;
}

static av_cold void color_uninit(AVFilterContext *ctx)
{
    ColorContext *color = ctx->priv;
    int i;

    for (i = 0; i < 4; i++) {
        av_freep(&color->line[i]);
        color->line_step[i] = 0;
    }
}

static int color_config_props(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->src;
    ColorContext *color = ctx->priv;
    uint8_t rgba_color[4];
    int is_packed_rgba;
    const AVPixFmtDescriptor *pix_desc = &av_pix_fmt_descriptors[inlink->format];

    color->hsub = pix_desc->log2_chroma_w;
    color->vsub = pix_desc->log2_chroma_h;

    color->w &= ~((1 << color->hsub) - 1);
    color->h &= ~((1 << color->vsub) - 1);
    if (av_image_check_size(color->w, color->h, 0, ctx) < 0)
        return AVERROR(EINVAL);

    memcpy(rgba_color, color->color, sizeof(rgba_color));
    fill_line_with_color(color->line, color->line_step, color->w, color->color,
                         inlink->format, rgba_color, &is_packed_rgba);

    av_log(ctx, AV_LOG_INFO, "w:%d h:%d r:%d/%d color:0x%02x%02x%02x%02x[%s]\n",
           color->w, color->h, color->time_base.den, color->time_base.num,
           color->color[0], color->color[1], color->color[2], color->color[3],
           is_packed_rgba ? "rgba" : "yuva");
    inlink->w = color->w;
    inlink->h = color->h;

    return 0;
}

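/**
 * Produce one frame of the configured color: grab a writable buffer, stamp
 * the timestamp and pixel aspect ratio, fill it with draw_rectangle() and
 * push it downstream as a single slice.
 */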
static int color_request_frame(AVFilterLink *link)
{
    ColorContext *color = link->src->priv;
    AVFilterBufferRef *picref = avfilter_get_video_buffer(link, AV_PERM_WRITE, color->w, color->h);
    picref->video->pixel_aspect = (AVRational) {1, 1};
    picref->pts                 = av_rescale_q(color->pts++, color->time_base, AV_TIME_BASE_Q);
    picref->pos                 = 0;

    avfilter_start_frame(link, avfilter_ref_buffer(picref, ~0));
    draw_rectangle(picref,
                   color->line, color->line_step, color->hsub, color->vsub,
                   0, 0, color->w, color->h);
    avfilter_draw_slice(link, 0, color->h, 1);
    avfilter_end_frame(link);
    avfilter_unref_buffer(picref);

    return 0;
}

AVFilter avfilter_vsrc_color = {
    .name        = "color",
    .description = NULL_IF_CONFIG_SMALL("Provide a uniformly colored input, syntax is: [color[:size[:rate]]]"),

    .priv_size = sizeof(ColorContext),
    .init      = color_init,
    .uninit    = color_uninit,

    .query_formats = query_formats,

    .inputs    = (AVFilterPad[]) {{ .name = NULL}},

    .outputs   = (AVFilterPad[]) {{ .name            = "default",
                                    .type            = AVMEDIA_TYPE_VIDEO,
                                    .request_frame   = color_request_frame,
                                    .config_props    = color_config_props },
                                  { .name = NULL}},
};

#endif /* CONFIG_COLOR_FILTER */