// video.cpp — video decode (FFmpeg) and render (SDL2) module of the player.
  1. #include "video.h"
  2. #include "packet.h"
  3. #include "frame.h"
  4. #include "player.h"
  5. static int queue_picture(player_stat_t *is, AVFrame *src_frame, double pts, double duration, int64_t pos)
  6. {
  7. frame_t *vp = NULL;
  8. if (!(vp = frame_queue_peek_writable(&is->video_frm_queue))) {
  9. return -1;
  10. }
  11. vp->sar = src_frame->sample_aspect_ratio;
  12. vp->uploaded = 0;
  13. vp->width = src_frame->width;
  14. vp->height = src_frame->height;
  15. vp->format = src_frame->format;
  16. vp->pts = pts;
  17. vp->duration = duration;
  18. vp->pos = pos;
  19. //vp->serial = serial;
  20. //set_default_window_size(vp->width, vp->height, vp->sar);
  21. // 将AVFrame拷入队列相应位置
  22. av_frame_move_ref(vp->frame, src_frame);
  23. // 更新队列计数及写索引
  24. frame_queue_push(&is->video_frm_queue);
  25. return 0;
  26. }
  27. // 从packet_queue中取一个packet,解码生成frame
  28. static int video_decode_frame(AVCodecContext *p_codec_ctx, packet_queue_t *p_pkt_queue, AVFrame *frame, CMediaHostApi* hostapi)
  29. {
  30. int ret = -1;
  31. while (0 == p_pkt_queue->abort_flag)
  32. {
  33. AVPacket pkt = {0};
  34. av_init_packet(&pkt);
  35. while (0 == p_pkt_queue->abort_flag)
  36. {
  37. // 3. 从解码器接收frame
  38. // 3.1 一个视频packet含一个视频frame
  39. // 解码器缓存一定数量的packet后,才有解码后的frame输出
  40. // frame输出顺序是按pts的顺序,如IBBPBBP
  41. // frame->pkt_pos变量是此frame对应的packet在视频文件中的偏移地址,值同pkt.pos
  42. ret = avcodec_receive_frame(p_codec_ctx, frame);
  43. if (ret < 0)
  44. {
  45. if (ret == AVERROR_EOF)
  46. {
  47. hostapi->Debug(MEDIA_LOG_DEBUG, "video avcodec_receive_frame(): the decoder has been fully flushed.");
  48. avcodec_flush_buffers(p_codec_ctx);
  49. return 0;
  50. }
  51. else if (ret == AVERROR(EAGAIN))
  52. {
  53. //hostapi->Debug(MEDIA_LOG_DEBUG,"video avcodec_receive_frame(): output is not available in this state - " "user must try to send new input");
  54. break;
  55. }
  56. else
  57. {
  58. hostapi->Debug(MEDIA_LOG_DEBUG, "video avcodec_receive_frame(): other errors.");
  59. continue;
  60. }
  61. }
  62. else
  63. {
  64. frame->pts = frame->best_effort_timestamp;
  65. //frame->pts = frame->pkt_dts;
  66. return 1; // 成功解码得到一个视频帧,则返回
  67. }
  68. }
  69. // 1. 取出一个packet。使用pkt对应的serial赋值给d->pkt_serial
  70. if (packet_queue_get(p_pkt_queue, &pkt, true, hostapi) < 0){
  71. return -1;
  72. }
  73. if (pkt.data == NULL){
  74. // 复位解码器内部状态/刷新内部缓冲区。
  75. avcodec_flush_buffers(p_codec_ctx);
  76. }
  77. else{
  78. // 2. 将packet发送给解码器
  79. // 发送packet的顺序是按dts递增的顺序,如IPBBPBB
  80. // pkt.pos变量可以标识当前packet在视频文件中的地址偏移
  81. int isend_ret = -1;
  82. isend_ret = avcodec_send_packet(p_codec_ctx, &pkt);
  83. if (0 != isend_ret){
  84. if (AVERROR(EAGAIN) == isend_ret) {
  85. hostapi->Debug(MEDIA_LOG_DEBUG, "receive_frame and send_packet both returned EAGAIN, which is an API violation.");
  86. }
  87. else if (AVERROR_EOF == isend_ret) {
  88. hostapi->Debug(MEDIA_LOG_DEBUG, "the decoder has been flushed, and no new packets can be sent to it");
  89. }
  90. else if (AVERROR(EINVAL) == isend_ret) {
  91. hostapi->Debug(MEDIA_LOG_DEBUG, "codec not opened, it is an encoder, or requires flush");
  92. }
  93. else if (AVERROR(ENOMEM) == isend_ret) {
  94. hostapi->Debug(MEDIA_LOG_DEBUG, "failed to add packet to internal queue, or similar");
  95. }
  96. else {
  97. hostapi->Debug(MEDIA_LOG_DEBUG, "legitimate decoding errors and avcodec_send_packet result is %d.", isend_ret);
  98. }
  99. }
  100. av_packet_unref(&pkt);
  101. }
  102. }
  103. }
  104. // 将视频包解码得到视频帧,然后写入picture队列
  105. static int video_decode_thread(void *arg)
  106. {
  107. player_stat_t *is = (player_stat_t *)arg;
  108. AVFrame *p_frame = av_frame_alloc();
  109. double pts = 0.0;
  110. double duration = 0.0;
  111. int ret=0;
  112. int got_picture = 0;
  113. if (p_frame == NULL){
  114. is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "av_frame_alloc() for p_frame failed.");
  115. return AVERROR(ENOMEM);
  116. }
  117. while (false == is->buser_stop)
  118. {
  119. AVRational tb = is->m_pvideo_stream[is->m_icurrent_index]->time_base;
  120. AVRational frame_rate = av_guess_frame_rate(is->m_pfmt_ctx[is->m_icurrent_index], is->m_pvideo_stream[is->m_icurrent_index], NULL);
  121. got_picture = video_decode_frame(is->m_pvcodec_ctx[is->m_icurrent_index], &is->video_pkt_queue, p_frame, is->rvc_hostapi);
  122. if (got_picture < 0){
  123. is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "video_decode_frame < 0, goto end and set video_finished flag to true.");
  124. goto exit;
  125. }
  126. AVRational tbdata = { frame_rate.den, frame_rate.num };
  127. duration = (frame_rate.num && frame_rate.den ? av_q2d(tbdata) : 0); // 当前帧播放时长
  128. //duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0); // 当前帧播放时长
  129. pts = (p_frame->pts == AV_NOPTS_VALUE) ? NAN : p_frame->pts * av_q2d(tb); // 当前帧显示时间戳
  130. ret = queue_picture(is, p_frame, pts, duration, p_frame->pkt_pos); // 将当前帧压入frame_queue
  131. av_frame_unref(p_frame);
  132. if (ret < 0){
  133. is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "queue_picture return -1, goto end and set video_finished flag to true.");
  134. goto exit;
  135. }
  136. else{
  137. //is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "queue_picture success!");
  138. }
  139. }
  140. exit:
  141. av_frame_free(&p_frame);
  142. is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "video decode thread exit, thread id is %u, and user_stop flag is %s.", SDL_ThreadID(), is->buser_stop ? "true" : "false");
  143. is->m_bvideo_decode_finished = true;
  144. return 0;
  145. }
  146. // 根据视频时钟与同步时钟(如音频时钟)的差值,校正delay值,使视频时钟追赶或等待同步时钟
  147. // 输入参数delay是上一帧播放时长,即上一帧播放后应延时多长时间后再播放当前帧,通过调节此值来调节当前帧播放快慢
  148. // 返回值delay是将输入参数delay经校正后得到的值
  149. static double compute_target_delay(double delay, player_stat_t *is)
  150. {
  151. double sync_threshold, diff = 0;
  152. /* update delay to follow master synchronisation source */
  153. /* if video is slave, we try to correct big delays by
  154. duplicating or deleting a frame */
  155. // 视频时钟与同步时钟(如音频时钟)的差异,时钟值是上一帧pts值(实为:上一帧pts + 上一帧至今流逝的时间差)
  156. diff = get_clock(&is->video_clk) - get_clock(&is->audio_clk);
  157. // delay是上一帧播放时长:当前帧(待播放的帧)播放时间与上一帧播放时间差理论值
  158. // diff是视频时钟与同步时钟的差值
  159. /* skip or repeat frame. We take into account the
  160. delay to compute the threshold. I still don't know
  161. if it is the best guess */
  162. // 若delay < AV_SYNC_THRESHOLD_MIN,则同步域值为AV_SYNC_THRESHOLD_MIN
  163. // 若delay > AV_SYNC_THRESHOLD_MAX,则同步域值为AV_SYNC_THRESHOLD_MAX
  164. // 若AV_SYNC_THRESHOLD_MIN < delay < AV_SYNC_THRESHOLD_MAX,则同步域值为delay
  165. sync_threshold = FFMAX(AV_SYNC_THRESHOLD_MIN, FFMIN(AV_SYNC_THRESHOLD_MAX, delay));
  166. if (!isnan(diff))
  167. {
  168. if (diff <= -sync_threshold) // 视频时钟落后于同步时钟,且超过同步域值
  169. delay = FFMAX(0, delay + diff); // 当前帧播放时刻落后于同步时钟(delay+diff<0)则delay=0(视频追赶,立即播放),否则delay=delay+diff
  170. else if (diff >= sync_threshold && delay > AV_SYNC_FRAMEDUP_THRESHOLD) // 视频时钟超前于同步时钟,且超过同步域值,但上一帧播放时长超长
  171. delay = delay + diff; // 仅仅校正为delay=delay+diff,主要是AV_SYNC_FRAMEDUP_THRESHOLD参数的作用
  172. else if (diff >= sync_threshold) // 视频时钟超前于同步时钟,且超过同步域值
  173. delay = 2 * delay; // 视频播放要放慢脚步,delay扩大至2倍
  174. }
  175. //is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "video: delay=%0.3f A-V=%f", delay, -diff);
  176. return delay;
  177. }
  178. static double vp_duration(player_stat_t *is, frame_t *vp, frame_t *nextvp)
  179. {
  180. if (vp->serial == nextvp->serial){
  181. double duration = nextvp->pts - vp->pts;
  182. if (isnan(duration) || duration <= 0)
  183. return vp->duration;
  184. else
  185. return duration;
  186. }
  187. else {
  188. return 0.0;
  189. }
  190. }
  191. static void update_video_pts(player_stat_t *is, double pts, int64_t pos, int serial) {
  192. /* update current video pts */
  193. set_clock(&is->video_clk, pts, serial); // 更新vidclock
  194. //-sync_clock_to_slave(&is->extclk, &is->vidclk); // 将extclock同步到vidclock
  195. }
// Render the current frame: colour-convert it into the per-file YUV buffer
// with sws_scale, upload the planes to the SDL texture, and present.
static void video_display(player_stat_t *is)
{
    frame_t *vp = NULL;
    vp = frame_queue_peek_last(&is->video_frm_queue);
    // Slot carries no picture data yet (nothing decoded) — skip rendering.
    if (0 == vp->frame->height || 0 == vp->frame->width){
        return;
    }
    // Image conversion: vp->frame->data ==> m_pfrm_yuv->data.
    // Converts a contiguous region of rows; rows must be contiguous.
    // plane : e.g. YUV has Y/U/V planes, RGB has R/G/B planes
    // slice : a contiguous run of rows, top-to-bottom or bottom-to-top
    // stride/pitch : bytes per row, Stride = BytesPerPixel*Width + padding
    // AVFrame.data[]     : one pointer per plane
    // AVFrame.linesize[] : bytes per row of the corresponding plane
    sws_scale(is->m_pimg_convert_ctx[is->m_icurrent_index], // sws context
    (const uint8_t *const *)vp->frame->data, // src slice
    vp->frame->linesize, // src stride
    0, // src slice y
    is->m_pvcodec_ctx[is->m_icurrent_index]->height, // src slice height
    is->m_pfrm_yuv[is->m_icurrent_index]->data, // dst planes
    is->m_pfrm_yuv[is->m_icurrent_index]->linesize // dst strides
    );
    // Upload the freshly converted YUV pixel data into the SDL texture.
    SDL_UpdateYUVTexture(is->sdl_video.texture, // sdl texture
    &is->sdl_video.rect, // sdl rect
    is->m_pfrm_yuv[is->m_icurrent_index]->data[0], // y plane
    is->m_pfrm_yuv[is->m_icurrent_index]->linesize[0], // y pitch
    is->m_pfrm_yuv[is->m_icurrent_index]->data[1], // u plane
    is->m_pfrm_yuv[is->m_icurrent_index]->linesize[1], // u pitch
    is->m_pfrm_yuv[is->m_icurrent_index]->data[2], // v plane
    is->m_pfrm_yuv[is->m_icurrent_index]->linesize[2] // v pitch
    );
    // Clear the current render target with the draw colour.
    SDL_RenderClear(is->sdl_video.renderer);
    // Copy the whole texture (src rect NULL) into the destination rect.
    SDL_RenderCopy(is->sdl_video.renderer, // sdl renderer
    is->sdl_video.texture, // sdl texture
    NULL, // src rect, if NULL copy texture
    &is->sdl_video.rect // dst rect
    );
    // Present: make the rendered frame visible on screen.
    SDL_RenderPresent(is->sdl_video.renderer);
    SDL_Delay(1);
}
/* called to display each frame */
// Decide whether the pending frame is due, drop late frames, update the
// video clock, and hand the chosen frame to video_display(). On early
// return, *remaining_time tells the caller how long to sleep.
static void video_refresh(void *opaque, double *remaining_time)
{
player_stat_t *is = (player_stat_t *)opaque;
double time;
// NOTE(review): function-static flag — it is never reset, so a second
// playback in the same process will skip the frame_timer re-seed. Confirm
// this is intended for single-run usage.
static bool first_frame = true;
retry:
if (frame_queue_nb_remaining(&is->video_frm_queue) == 0) // all frames shown
{
// nothing to do, no picture to display in the queue
av_usleep(100*1000);
return;
}
double last_duration, duration, delay;
frame_t *vp, *lastvp;
/* dequeue the picture */
lastvp = frame_queue_peek_last(&is->video_frm_queue); // previous frame: last one displayed
vp = frame_queue_peek(&is->video_frm_queue); // current frame: next one to display
// First frame ever: seed frame_timer with the current time.
if (first_frame){
is->frame_timer = av_gettime_relative() / 1000000.0;
first_frame = false;
}
// Paused: keep re-displaying the previous frame.
if (is->m_ipaused){
goto display;
}
/* compute nominal last_duration */
last_duration = vp_duration(is, lastvp, vp); // previous frame duration: vp->pts - lastvp->pts
delay = compute_target_delay(last_duration, is); // corrected by the video/audio clock drift
time= av_gettime_relative()/1000000.0;
// Display time of the current frame (frame_timer + delay) is still in the
// future: not due yet.
if (time < is->frame_timer + delay) {
// Report how long until the next display time and return without showing.
*remaining_time = FFMIN(is->frame_timer + delay - time, *remaining_time);
return;
}
// Advance frame_timer to the current frame's display time.
is->frame_timer += delay;
// Resync: if frame_timer fell too far behind the wall clock (beyond the
// max sync threshold), snap it to the current time.
if (delay > 0 && time - is->frame_timer > AV_SYNC_THRESHOLD_MAX){
is->frame_timer = time;
}
SDL_LockMutex(is->video_frm_queue.frame_mutex);
if (!isnan(vp->pts)){
update_video_pts(is, vp->pts, vp->pos, vp->serial); // update video clock with this pts
}
SDL_UnlockMutex(is->video_frm_queue.frame_mutex);
// Frame dropping: only considered when more than one frame is queued.
if (frame_queue_nb_remaining(&is->video_frm_queue) > 1)
{
frame_t *nextvp = frame_queue_peek_next(&is->video_frm_queue); // frame after the current one
duration = vp_duration(is, vp, nextvp); // current frame duration = nextvp->pts - vp->pts
// Current frame is already late (next frame's display time has passed):
// drop it and retry with the next frame.
if (time > is->frame_timer + duration){
frame_queue_next(&is->video_frm_queue); // consume lastvp; read index now at vp
goto retry;
}
}
// Consume one entry: read index moves from lastvp to vp (or, after a drop,
// from vp to nextvp).
frame_queue_next(&is->video_frm_queue);
display:
video_display(is); // show the chosen frame (vp, or nextvp after a drop)
}
  319. static uint32_t get_video_playing_wind_flag(m_eWindType_t eType)
  320. {
  321. uint32_t uFlag = SDL_WINDOW_BORDERLESS | SDL_WINDOW_OPENGL | SDL_WINDOW_ALWAYS_ON_TOP | SDL_WINDOW_SKIP_TASKBAR | SDL_WINDOW_POPUP_MENU | SDL_WINDOW_SHOWN;
  322. #ifndef _WIN32
  323. uFlag = SDL_WINDOW_OPENGL|SDL_WINDOW_BORDERLESS|SDL_WINDOW_ALWAYS_ON_TOP|SDL_WINDOW_POPUP_MENU;
  324. #endif
  325. return uFlag;
  326. }
// Video playing thread: create the SDL window/renderer/texture, then loop
// calling video_refresh() until the user stops or decoding finishes.
// Returns 0 on normal exit, -1 when SDL object creation fails.
static int video_playing_thread(void *arg)
{
player_stat_t *is = (player_stat_t *)arg;
double remaining_time = 0.0;
uint32_t uWindFlag = get_video_playing_wind_flag(is->m_eWindType);
is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "%s:%d %d %d %d %d", __FUNCTION__, __LINE__,
is->iDisplayCx,
is->iDisplayCy,
is->sdl_video.rect.w,
is->sdl_video.rect.h);
// 1. Create the SDL window (SDL 2.0 supports multiple windows).
// SDL_Window is the on-screen video window, like SDL_Surface in SDL 1.x.
is->sdl_video.window = SDL_CreateWindow("player",
is->iDisplayCx,
is->iDisplayCy,
is->sdl_video.rect.w,
is->sdl_video.rect.h,
uWindFlag
);
if (is->sdl_video.window == NULL) {
is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "SDL_CreateWindow() failed: %s.", SDL_GetError());
return -1;
}
else {
is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "SDL Create Window success.");
#ifdef _WIN32
// On Windows, fetch the native HWND and force the window topmost.
// NOTE: SWP_NOMOVE | SWP_NOSIZE means the position/size arguments to
// SetWindowPos are ignored; only the Z-order (HWND_TOPMOST) is applied.
SDL_SysWMinfo info;
HWND hwnd;
SDL_VERSION(&info.version);
if (SDL_GetWindowWMInfo(is->sdl_video.window, &info)) {
hwnd = info.info.win.window;
is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "SDL_GetWindowWMInfo success.");
SetWindowPos(hwnd,
HWND_TOPMOST,
is->iDisplayCx,
is->iDisplayCy,
is->sdl_video.rect.w,
is->sdl_video.rect.h,
SWP_NOMOVE | SWP_NOSIZE);
}
else {
is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "SDL_GetWindowWMInfo failed.");
}
#else
#endif // _WIN32
}
int cx = 0, cy = 0;
SDL_GetWindowPosition(is->sdl_video.window, &cx, &cy);
is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "window flag is 0x%08x, cx = %d, cy = %d.", SDL_GetWindowFlags(is->sdl_video.window), cx, cy);
// 2. Create the SDL_Renderer.
// Enumerate render drivers; on non-Windows prefer the "software" driver.
int iNum = SDL_GetNumRenderDrivers();
int iRenderindex = -1;
// blog: log driver details only on the first run of this thread.
static bool blog = true;
if (blog) {
is->rvc_hostapi->Debug(MEDIA_LOG_INFO, "SDL_GetNumRenderDrivers %d.", iNum);
}
for (int index = 0; index < iNum; index++){
SDL_RendererInfo info = {0};
SDL_GetRenderDriverInfo(index, &info);
if (blog) {
is->rvc_hostapi->Debug(MEDIA_LOG_INFO, "%d render driver name is %s.", index, info.name);
}
#ifdef _WIN32
#else
if (strstr(info.name, "software")) {
iRenderindex = index;
}
#endif // _WIN32
}
// iRenderindex == -1 lets SDL pick the first driver matching the flags.
is->sdl_video.renderer = SDL_CreateRenderer(is->sdl_video.window, iRenderindex,
SDL_RENDERER_TARGETTEXTURE |
SDL_RENDERER_PRESENTVSYNC |
SDL_RENDERER_ACCELERATED);
if (NULL == is->sdl_video.renderer){
is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "SDL_CreateRenderer() failed: %s", SDL_GetError());
return -1;
}
SDL_RendererInfo RenderInfo;
SDL_GetRendererInfo(is->sdl_video.renderer, &RenderInfo);
if (blog) {
is->rvc_hostapi->Debug(MEDIA_LOG_INFO, "render driver name is %s.", RenderInfo.name);
is->rvc_hostapi->Debug(MEDIA_LOG_INFO, "render flag is %d.", RenderInfo.flags);
}
blog = false;
// 3. Create the SDL_Texture.
// One SDL_Texture holds one YUV frame, like SDL_Overlay in SDL 1.x.
// SDL_PIXELFORMAT_IYUV matches FFmpeg's AV_PIX_FMT_YUV420P.
is->sdl_video.texture = SDL_CreateTexture(is->sdl_video.renderer,
SDL_PIXELFORMAT_IYUV,
SDL_TEXTUREACCESS_STATIC,
is->sdl_video.rect.w,
is->sdl_video.rect.h
);
if (NULL == is->sdl_video.texture){
is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "SDL_CreateTexture() failed: %s", SDL_GetError());
return -1;
}
SDL_ShowWindow(is->sdl_video.window);
// Main refresh loop: sleep whatever remaining_time video_refresh() left,
// then refresh; also drain the SDL event queue each pass.
while ((false == is->buser_stop) && (false == is->m_bvideo_decode_finished)){
if (remaining_time > 0.0){
av_usleep((unsigned)(remaining_time * 1000000.0));
}
remaining_time = REFRESH_RATE;
// Show the current frame now, or have remaining_time shortened for a retry.
video_refresh(is, &remaining_time);
SDL_Event event;
while(SDL_PollEvent(&event))
{
switch(event.type)
{
case SDL_QUIT:
// NOTE(review): SDL_QUIT is swallowed here — no stop flag is set. Confirm
// quitting is handled elsewhere.
break;
}
}
}
is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "video playing thread exit, thread id is %u, and user_stop flag is %s.", SDL_ThreadID(), is->buser_stop ? "true" : "false");
return 0;
}
// Set up the playback side for every media file: allocate the destination
// YUV frame and buffer, build the sws conversion context, size the display
// rect, and start the video playing thread.
// Returns 0 on success, -1 on any allocation/initialisation failure.
static int open_video_playing(void* arg)
{
player_stat_t* is = (player_stat_t*)arg;
int iret = -1;
int buf_size = 0;
uint8_t* buffer = NULL;
for (size_t index = 0; index < is->m_uFilesCount; index++) {
is->m_pfrm_yuv[index] = av_frame_alloc();
if (NULL == is->m_pfrm_yuv[index]) {
is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "av_frame_alloc() for p_frm_raw failed");
return iret;
}
// Output size defaults to the codec's size...
int iplay_video_width = 0;
if (NULL != is->m_pvcodec_ctx[index]){
iplay_video_width = is->m_pvcodec_ctx[index]->width;
}
int iplay_video_height = 0;
if (NULL != is->m_pvcodec_ctx[index]){
iplay_video_height = is->m_pvcodec_ctx[index]->height;
}
// ...unless full-screen or an explicit display size was requested.
if (eFullScreen_Type == is->m_eWindType || eSpecified_Type == is->m_eWindType) {
iplay_video_width = is->iDisplayWidth;
iplay_video_height = is->iDisplayHeight;
}
// Manually allocate the buffer backing AVFrame.data[]; it receives the
// destination video data written by sws_scale().
buf_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
iplay_video_width,
iplay_video_height,
1
);
is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "av_image_get_buffer_size is %d.", buf_size);
// buffer becomes the pixel storage for m_pfrm_yuv[index].
buffer = (uint8_t*)av_malloc(buf_size);
if (NULL == buffer) {
is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "av_malloc() for buffer failed!");
return iret;
}
is->m_pvideo_buffer[index] = buffer;
// Point m_pfrm_yuv->data / linesize into the buffer just allocated.
iret = av_image_fill_arrays(is->m_pfrm_yuv[index]->data, // dst data[]
is->m_pfrm_yuv[index]->linesize, // dst linesize[]
is->m_pvideo_buffer[index], // src buffer
AV_PIX_FMT_YUV420P, // pixel format
iplay_video_width, // width
iplay_video_height, // height
1 // align
);
if (iret < 0) {
is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "av_image_fill_arrays() failed %d", iret);
return iret;
}
// A2. Initialise the SWS context used for later image conversion.
// The 6th parameter is an FFmpeg pixel format; FFmpeg's AV_PIX_FMT_YUV420P
// corresponds to SDL's SDL_PIXELFORMAT_IYUV. Everything is converted to
// YUV420P so SDL can always display it, whether or not the decoded format
// was SDL-compatible.
is->m_pimg_convert_ctx[index] = sws_getContext(is->m_pvcodec_ctx[index]->width, // src width
is->m_pvcodec_ctx[index]->height, // src height
is->m_pvcodec_ctx[index]->pix_fmt, // src format
iplay_video_width, // dst width
iplay_video_height, // dst height
AV_PIX_FMT_YUV420P, // dst format
SWS_BICUBIC, // flags
NULL, // src filter
NULL, // dst filter
NULL // param
);
if (NULL == is->m_pimg_convert_ctx[index]) {
is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "sws_getContext() failed.");
return iret;
}
else {
is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "%s:%d is->m_pimg_convert_ctx[%d] = 0x%08x.", __FUNCTION__, __LINE__, index, is->m_pimg_convert_ctx[index]);
}
// Display rect for this file's output size.
// NOTE(review): sdl_video.rect and m_video_playing_tid are overwritten on
// every loop iteration, so a playing thread is spawned PER FILE and only
// the last thread id is kept. Confirm whether thread creation was meant to
// happen once, after the loop.
is->sdl_video.rect.x = 0;
is->sdl_video.rect.y = 0;
is->sdl_video.rect.w = iplay_video_width;
is->sdl_video.rect.h = iplay_video_height;
is->m_video_playing_tid = SDL_CreateThread(video_playing_thread, "video playing thread", is);
if (NULL == is->m_video_playing_tid) {
is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "SDL_Create video playing thread failed: %s.", SDL_GetError());
return iret;
}
else {
is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "create %s success, and thread id is %u.", SDL_GetThreadName(is->m_video_playing_tid), SDL_GetThreadID(is->m_video_playing_tid));
iret = 0;
}
}
return iret;
}
  542. static int open_video_stream(player_stat_t *is)
  543. {
  544. AVCodecParameters* p_codec_par = NULL;
  545. AVCodec* p_codec = NULL;
  546. AVCodecContext* p_codec_ctx = NULL;
  547. int iret = -1;
  548. for (size_t index = 0; index < is->m_uFilesCount; index++){
  549. AVStream* p_stream = is->m_pvideo_stream[index];
  550. // 1. 为视频流构建解码器AVCodecContext
  551. // 1.1 获取解码器参数AVCodecParameters
  552. p_codec_par = p_stream->codecpar;
  553. // 1.2 获取解码器
  554. p_codec = avcodec_find_decoder(p_codec_par->codec_id);
  555. if (p_codec == NULL) {
  556. is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "can not find codec!");
  557. return iret;
  558. }
  559. // 1.3 构建解码器AVCodecContext
  560. // 1.3.1 p_codec_ctx初始化:分配结构体,使用p_codec初始化相应成员为默认值
  561. p_codec_ctx = avcodec_alloc_context3(p_codec);
  562. if (p_codec_ctx == NULL) {
  563. is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "avcodec_alloc_context3() failed");
  564. return iret;
  565. }
  566. // 1.3.2 p_codec_ctx初始化:p_codec_par ==> p_codec_ctx,初始化相应成员
  567. iret = avcodec_parameters_to_context(p_codec_ctx, p_codec_par);
  568. if (iret < 0) {
  569. is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "avcodec_parameters_to_context() failed");
  570. avcodec_close(p_codec_ctx);
  571. avcodec_free_context(&p_codec_ctx);
  572. return iret;
  573. }
  574. // 1.3.3 p_codec_ctx初始化:使用p_codec初始化p_codec_ctx,初始化完成
  575. iret = avcodec_open2(p_codec_ctx, p_codec, NULL);
  576. if (iret < 0) {
  577. is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "avcodec_open2() failed %d", iret);
  578. avcodec_close(p_codec_ctx);
  579. avcodec_free_context(&p_codec_ctx);
  580. return iret;
  581. }
  582. is->m_pvcodec_ctx[index] = p_codec_ctx;
  583. }
  584. // 2. 创建视频解码线程
  585. is->m_video_decode_tid = SDL_CreateThread(video_decode_thread, "video decode thread", is);
  586. if (NULL == is->m_video_decode_tid) {
  587. is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "SDL_Create video decode thread failed: %s.", SDL_GetError());
  588. return iret;
  589. }
  590. else {
  591. is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "create %s success, and thread id is %u.", SDL_GetThreadName(is->m_video_decode_tid), SDL_GetThreadID(is->m_video_decode_tid));
  592. iret = 0;
  593. }
  594. return iret;
  595. }
  596. int open_video(player_stat_t *is)
  597. {
  598. int iret = -1;
  599. is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "enter open_video()");
  600. if (0 == open_video_stream(is)) {
  601. iret = open_video_playing(is);
  602. }
  603. is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "exit open_video()");
  604. return iret;
  605. }