ffplay Video Output and Size Transformation
Video output module
Main flow of video output initialization
We now start analyzing how video (images) are displayed.
Since ffplay uses SDL, and video display relies on SDL's windowing system, we first look at the SDL initialization in the main function (excerpt):
int main(int argc, char **argv)
{
    // SDL initialization
    flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
    if (audio_disable)
        flags &= ~SDL_INIT_AUDIO;
    else {
        /* Try to work around an occasional ALSA buffer underflow issue when the
         * period size is NPOT due to ALSA resampling by forcing the buffer size. */
        if (!SDL_getenv("SDL_AUDIO_ALSA_SET_BUFFER_SIZE"))
            SDL_setenv("SDL_AUDIO_ALSA_SET_BUFFER_SIZE","1", 1);
    }
    if (display_disable)
        flags &= ~SDL_INIT_VIDEO;
    if (SDL_Init (flags)) {
        av_log(NULL, AV_LOG_FATAL, "Could not initialize SDL - %s\n", SDL_GetError());
        av_log(NULL, AV_LOG_FATAL, "(Did you set the DISPLAY variable?)\n");
        exit(1);
    }

    SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
    SDL_EventState(SDL_USEREVENT, SDL_IGNORE);

    av_init_packet(&flush_pkt);
    flush_pkt.data = (uint8_t *)&flush_pkt;

    if (!display_disable) {
        int flags = SDL_WINDOW_HIDDEN;
        if (alwaysontop)
#if SDL_VERSION_ATLEAST(2,0,5)
            flags |= SDL_WINDOW_ALWAYS_ON_TOP;
#else
            av_log(NULL, AV_LOG_WARNING, "Your SDL version doesn't support SDL_WINDOW_ALWAYS_ON_TOP. Feature will be inactive.\n");
#endif
        if (borderless)
            flags |= SDL_WINDOW_BORDERLESS;
        else
            flags |= SDL_WINDOW_RESIZABLE;
        // Create the main window
        window = SDL_CreateWindow(program_name, SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, default_width, default_height, flags);
        SDL_SetHint(SDL_HINT_RENDER_SCALE_QUALITY, "linear");
        if (window) {
            // Create the renderer
            renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC);
            if (!renderer) {
                av_log(NULL, AV_LOG_WARNING, "Failed to initialize a hardware accelerated renderer: %s\n", SDL_GetError());
                renderer = SDL_CreateRenderer(window, -1, 0);
            }
            if (renderer) {
                if (!SDL_GetRendererInfo(renderer, &renderer_info))
                    av_log(NULL, AV_LOG_VERBOSE, "Initialized %s renderer.\n", renderer_info.name);
            }
        }
        if (!window || !renderer || !renderer_info.num_texture_formats) {
            av_log(NULL, AV_LOG_FATAL, "Failed to create window or renderer: %s", SDL_GetError());
            do_exit(NULL);
        }
    }

    // stream_open() creates the VideoState and starts the read_thread reader thread
    is = stream_open(input_filename, file_iformat);
    if (!is) {
        av_log(NULL, AV_LOG_FATAL, "Failed to initialize VideoState!\n");
        do_exit(NULL);
    }

    // Event handling loop
    event_loop(is);

    /* never returns */
    return 0;
}
The main steps of the main function are:
- SDL_Init, mainly to get SDL_INIT_VIDEO support
- SDL_CreateWindow, which creates the main window
- SDL_CreateRenderer, which creates a renderer on top of the main window, used for rendering output
- stream_open, which sets up the VideoState and starts the read_thread reader thread
- event_loop, the playback-control event loop, which also drives the video display output
When we walked through the read_thread thread earlier, we saw:
// Use the parameters of the stream about to be played to set the display window's width, height and aspect ratio
if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
    AVStream *st = ic->streams[st_index[AVMEDIA_TYPE_VIDEO]];
    AVCodecParameters *codecpar = st->codecpar;
    // Guess the sample aspect ratio of the frames from the stream and frame aspect ratios; the value is only a hint
    AVRational sar = av_guess_sample_aspect_ratio(ic, st, NULL);
    if (codecpar->width)
        // Set the default window size and aspect ratio
        set_default_window_size(codecpar->width, codecpar->height, sar);
}
Here we focus on how set_default_window_size works; it determines the window's width and height, i.e. the area in which the video will be rendered:
static void set_default_window_size(int width, int height, AVRational sar)
{
    SDL_Rect rect;
    // Maximum window width, if the user specified one
    int max_width = screen_width ? screen_width : INT_MAX;
    // Maximum window height, if the user specified one
    int max_height = screen_height ? screen_height : INT_MAX;
    if (max_width == INT_MAX && max_height == INT_MAX)
        max_height = height; // if no maximum height was given, use the video height
    calculate_display_rect(&rect, 0, 0, max_width, max_height, width, height, sar);
    default_width = rect.w;
    default_height = rect.h;
}
screen_width and screen_height can be specified when starting ffplay with -x screen_width -y screen_height. If they are not specified, then max_height = height, i.e. the height of the video frame.
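As a concrete illustration (input.mp4 is just a placeholder name; assume its frames are 1920x1080):

    ffplay -x 1280 -y 720 input.mp4   # screen_width = 1280, screen_height = 720 -> max_width = 1280, max_height = 720
    ffplay input.mp4                  # no -x/-y -> max_width = INT_MAX, max_height = 1080 (the frame height)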
The key work is done in the calculate_display_rect() function.
Initializing the window display size
calculate_display_rect
From the parameters it is given (int scr_xleft, int scr_ytop, int scr_width, int scr_height, int pic_width, int pic_height, AVRational pic_sar), it computes the top-left coordinates and the size of the display area (rect).
static void calculate_display_rect(SDL_Rect *rect,
                                   int scr_xleft, int scr_ytop, int scr_width, int scr_height,
                                   int pic_width, int pic_height, AVRational pic_sar)
{
    AVRational aspect_ratio = pic_sar; // sample aspect ratio
    int64_t width, height, x, y;

    if (av_cmp_q(aspect_ratio, av_make_q(0, 1)) <= 0)
        aspect_ratio = av_make_q(1, 1); // if aspect_ratio is zero or negative, fall back to 1:1

    // Convert to the real display aspect ratio
    aspect_ratio = av_mul_q(aspect_ratio, av_make_q(pic_width, pic_height));

    /* XXX: we suppose the screen has a 1.0 pixel ratio */
    // Compute the width and height of the video display area
    // Start from the height
    height = scr_height;
    // & ~1 forces the width to an even number
    width = av_rescale(height, aspect_ratio.num, aspect_ratio.den) & ~1;
    if (width > scr_width) {
        // If the width derived from the height does not fit the window, switch to using the window width as the base
        width = scr_width;
        height = av_rescale(width, aspect_ratio.den, aspect_ratio.num) & ~1;
    }
    // Compute the top-left corner of the video area (inside the display window)
    x = (scr_width - width) / 2;
    y = (scr_height - height) / 2;
    rect->x = scr_xleft + x;
    rect->y = scr_ytop + y;
    rect->w = FFMAX((int)width, 1);
    rect->h = FFMAX((int)height, 1);
}
Note how the display size is computed:
aspect_ratio = av_mul_q(aspect_ratio, av_make_q(pic_width, pic_height));
This converts the sample aspect ratio into the actual aspect ratio used for display.
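As a minimal, self-contained sketch of this arithmetic (assuming libavutil is installed; the 720x576 frame with a 16:15 sample aspect ratio is just an illustrative input, not something taken from this article), the default window for such a stream works out to 768x576, i.e. a 4:3 picture:

#include <stdio.h>
#include <limits.h>
#include <inttypes.h>
#include <libavutil/rational.h>
#include <libavutil/mathematics.h>

int main(void)
{
    // Illustrative input: 720x576 coded frame with a 16:15 sample aspect ratio
    int pic_width = 720, pic_height = 576;
    AVRational sar = av_make_q(16, 15);

    // What set_default_window_size passes down when no -x/-y is given
    int scr_width  = INT_MAX;
    int scr_height = pic_height;

    // Same math as calculate_display_rect
    AVRational dar = av_mul_q(sar, av_make_q(pic_width, pic_height)); // 16/15 * 720/576 = 4/3
    int64_t height = scr_height;
    int64_t width  = av_rescale(height, dar.num, dar.den) & ~1;       // 576 * 4/3 = 768
    if (width > scr_width) {
        width  = scr_width;
        height = av_rescale(width, dar.den, dar.num) & ~1;
    }
    printf("default window: %"PRId64"x%"PRId64"\n", width, height);   // prints 768x576
    return 0;
}

Building it only needs libavutil (e.g. via pkg-config --cflags --libs libavutil); the same numbers fall out of set_default_window_size/calculate_display_rect for that input.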
Video output logic
main() -->
event_loop() -->
video_refresh() -->
video_display() -->
video_image_display() -->
upload_texture()
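The last two steps in this chain (video_image_display and upload_texture) are not quoted in this article. As a rough orientation, the following is a simplified sketch, not ffplay's actual implementation, of the SDL calls they boil down to in the common case; it assumes a decoded YUV420P AVFrame, an already created SDL_Renderer, and a display rect computed as above (render_yuv_frame is a hypothetical helper name):

#include <SDL.h>
#include <libavutil/frame.h>

/* Hypothetical helper: upload one decoded YUV420P frame and draw it into `rect`. */
static int render_yuv_frame(SDL_Renderer *renderer, SDL_Texture **tex,
                            const AVFrame *frame, const SDL_Rect *rect)
{
    if (!*tex) {
        // Streaming texture matching the frame size; IYUV = planar YUV 4:2:0
        *tex = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_IYUV,
                                 SDL_TEXTUREACCESS_STREAMING,
                                 frame->width, frame->height);
        if (!*tex)
            return -1;
    }
    // The "upload_texture" part: copy the Y, U and V planes into the texture
    if (SDL_UpdateYUVTexture(*tex, NULL,
                             frame->data[0], frame->linesize[0],
                             frame->data[1], frame->linesize[1],
                             frame->data[2], frame->linesize[2]) < 0)
        return -1;
    // The "video_image_display" part: clear, copy into the display rect, present
    SDL_RenderClear(renderer);
    SDL_RenderCopy(renderer, *tex, NULL, rect);
    SDL_RenderPresent(renderer);
    return 0;
}

ffplay's real upload_texture supports additional pixel formats and re-creates the texture whenever the frame size or format changes; the sketch only shows the basic upload-and-present flow.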
event_loop
This function starts processing SDL events:
static void event_loop(VideoState *cur_stream)
{
    SDL_Event event;
    double incr, pos, frac;

    for (;;) {
        double x;
        refresh_loop_wait_event(cur_stream, &event);
        switch (event.type) {
        ......
            case SDLK_SPACE:
                toggle_pause(cur_stream);
                break;
            case SDLK_m:
                toggle_mute(cur_stream);
                break;
            case SDLK_KP_MULTIPLY:
            case SDLK_0:
                update_volume(cur_stream, 1, SDL_VOLUME_STEP);
                break;
            case SDLK_KP_DIVIDE:
            case SDLK_9:
                update_volume(cur_stream, -1, SDL_VOLUME_STEP);
                break;
            case SDLK_s: // S: Step to next frame
                step_to_next_frame(cur_stream);
                break;
            case SDLK_a:
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_AUDIO);
                break;
            case SDLK_v:
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_VIDEO);
                break;
            case SDLK_c:
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_VIDEO);
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_AUDIO);
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_SUBTITLE);
                break;
            case SDLK_t:
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_SUBTITLE);
                break;
            case SDLK_w:
#if CONFIG_AVFILTER
                if (cur_stream->show_mode == SHOW_MODE_VIDEO && cur_stream->vfilter_idx < nb_vfilters - 1) {
                    if (++cur_stream->vfilter_idx >= nb_vfilters)
                        cur_stream->vfilter_idx = 0;
                } else {
                    cur_stream->vfilter_idx = 0;
                    toggle_audio_display(cur_stream);
                }
#else
                toggle_audio_display(cur_stream);
#endif
                break;
            case SDLK_PAGEUP:
                if (cur_stream->ic->nb_chapters <= 1) {
                    incr = 600.0;
                    goto do_seek;
                }
                seek_chapter(cur_stream, 1);
                break;
            case SDLK_PAGEDOWN:
                if (cur_stream->ic->nb_chapters <= 1) {
                    incr = -600.0;
                    goto do_seek;
                }
                seek_chapter(cur_stream, -1);
                break;
            case SDLK_LEFT:
                incr = seek_interval ? -seek_interval : -10.0;
                goto do_seek;
            case SDLK_RIGHT:
                incr = seek_interval ? seek_interval : 10.0;
                goto do_seek;
            case SDLK_UP:
                incr = 60.0;
                goto do_seek;
            case SDLK_DOWN:
                incr = -60.0;
            do_seek:
                if (seek_by_bytes) {
                    pos = -1;
                    if (pos < 0 && cur_stream->video_stream >= 0)
                        pos = frame_queue_last_pos(&cur_stream->pictq);
                    if (pos < 0 && cur_stream->audio_stream >= 0)
                        pos = frame_queue_last_pos(&cur_stream->sampq);
                    if (pos < 0)
                        pos = avio_tell(cur_stream->ic->pb);
                    if (cur_stream->ic->bit_rate)
                        incr *= cur_stream->ic->bit_rate / 8.0;
                    else
                        incr *= 180000.0;
                    pos += incr;
                    stream_seek(cur_stream, pos, incr, 1);
                } else {
                    pos = get_master_clock(cur_stream);
                    if (isnan(pos))
                        pos = (double)cur_stream->seek_pos / AV_TIME_BASE;
                    pos += incr;
                    if (cur_stream->ic->start_time != AV_NOPTS_VALUE && pos < cur_stream->ic->start_time / (double)AV_TIME_BASE)
                        pos = cur_stream->ic->start_time / (double)AV_TIME_BASE;
                    stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), (int64_t)(incr * AV_TIME_BASE), 0);
                }
                break;
            default:
                break;
            }
            break;
        case SDL_MOUSEBUTTONDOWN:
            if (exit_on_mousedown) {
                do_exit(cur_stream);
                break;
            }
            if (event.button.button == SDL_BUTTON_LEFT) {
                static int64_t last_mouse_left_click = 0;
                if (av_gettime_relative() - last_mouse_left_click <= 500000) {
                    toggle_full_screen(cur_stream);
                    cur_stream->force_refresh = 1;
                    last_mouse_left_click = 0;
                } else {
                    last_mouse_left_click = av_gettime_relative();
                }
            }
        case SDL_MOUSEMOTION:
            if (cursor_hidden) {
                SDL_ShowCursor(1);
                cursor_hidden = 0;
            }
            cursor_last_shown = av_gettime_relative();
            if (event.type == SDL_MOUSEBUTTONDOWN) {
                if (event.button.button != SDL_BUTTON_RIGHT)
                    break;
                x = event.button.x;
            } else {
                if (!(event.motion.state & SDL_BUTTON_RMASK))
                    break;
                x = event.motion.x;
            }
            if (seek_by_bytes || cur_stream->ic->duration <= 0) {
                uint64_t size = avio_size(cur_stream->ic->pb);
                stream_seek(cur_stream, size*x/cur_stream->width, 0, 1);
            } else {
                int64_t ts;
                int ns, hh, mm, ss;
                int tns, thh, tmm, tss;
                tns = cur_stream->ic->duration / 1000000LL;
                thh = tns / 3600;
                tmm = (tns % 3600) / 60;
                tss = (tns % 60);
                frac = x / cur_stream->width;
                ns = frac * tns;
                hh = ns / 3600;
                mm = (ns % 3600) / 60;
                ss = (ns % 60);
                av_log(NULL, AV_LOG_INFO,
                       "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d) \n", frac*100,
                       hh, mm, ss, thh, tmm, tss);
                ts = frac * cur_stream->ic->duration;
                if (cur_stream->ic->start_time != AV_NOPTS_VALUE)
                    ts += cur_stream->ic->start_time;
                stream_seek(cur_stream, ts, 0, 0);
            }
            break;
        case SDL_WINDOWEVENT:
            switch (event.window.event) {
            case SDL_WINDOWEVENT_SIZE_CHANGED:
                screen_width = cur_stream->width = event.window.data1;
                screen_height = cur_stream->height = event.window.data2;
                if (cur_stream->vis_texture) {
                    SDL_DestroyTexture(cur_stream->vis_texture);
                    cur_stream->vis_texture = NULL;
                }
            case SDL_WINDOWEVENT_EXPOSED:
                cur_stream->force_refresh = 1;
            }
            break;
        case SDL_QUIT:
        case FF_QUIT_EVENT:
            do_exit(cur_stream);
            break;
        default:
            break;
        }
    }
}
The main body of event_loop is a loop; each iteration:
- calls refresh_loop_wait_event
- handles events from the SDL event queue; for example, pressing the space bar toggles pause/resume, and closing the window triggers do_exit, which tears down the playback state.
The video display work mainly happens inside refresh_loop_wait_event:
static void refresh_loop_wait_event(VideoState *is, SDL_Event *event) {
    // Sleep/wait time; remaining_time is computed in video_refresh
    double remaining_time = 0.0;
    // Call SDL_PumpEvents before SDL_PeepEvents to pump input-device events into the event queue
    SDL_PumpEvents();
    // SDL_PeepEvents checks whether there is a pending event, e.g. the mouse entering the display area
    // It takes one event from the queue and stores it in `event`; if there is none, we stay in the loop
    // SDL_PeepEvents only reads events; SDL_PumpEvents must be called first to gather keyboard etc. events
    while (!SDL_PeepEvents(event, 1, SDL_GETEVENT, SDL_FIRSTEVENT, SDL_LASTEVENT)) {
        if (!cursor_hidden && av_gettime_relative() - cursor_last_shown > CURSOR_HIDE_DELAY) {
            SDL_ShowCursor(0);
            cursor_hidden = 1;
        }
        /*
         * remaining_time is what implements audio/video synchronization.
         * In video_refresh, the time to sleep is computed from the current frame's
         * display time and the actual time, so that frames are shown on schedule.
         */
        if (remaining_time > 0.0) // sleep to control when the picture is presented
            av_usleep((int64_t)(remaining_time * 1000000.0));
        remaining_time = REFRESH_RATE;
        if (is->show_mode != SHOW_MODE_NONE &&
            (!is->paused ||        // not paused
             is->force_refresh))   // or a refresh was forced
            video_refresh(is, &remaining_time);
        SDL_PumpEvents();
    }
}
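To put numbers on this: REFRESH_RATE is 0.01 seconds in ffplay.c, so while waiting for events the loop wakes up at most every 10 ms to pump SDL events; when video_refresh determines that the next frame is due sooner, say in 5 ms, it lowers remaining_time accordingly so that the loop sleeps just long enough and the frame is presented on time.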