videocapture_linux.cpp 29 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144
  1. #include"videocapture_linux.h"
  2. #include "../../libvideoframework/videoutil.h"
  3. #include "../../libvideoframework/aligned_malloc.h"
  4. #include <stdlib.h>
  5. #include <string.h>
  6. #include <errno.h>
  7. #include <fcntl.h>
  8. #include <linux/videodev2.h>
  9. #include <stdio.h>
  10. #include <string.h>
  11. #include <sys/ioctl.h>
  12. #include <sys/mman.h>
  13. #include <sys/stat.h>
  14. #include <unistd.h>
  15. #include <memory>
  16. #ifdef __cplusplus
  17. extern "C" {
  18. #endif
  19. #include <libavcodec/avcodec.h>
  20. #include <libavformat/avformat.h>
  21. #include <libswscale/swscale.h>
  22. #ifdef __cplusplus
  23. }
  24. #endif
// Byte alignment used for all AlignedMalloc'd frame-conversion buffers.
static const int kBufferAlignment = 64;
  26. // Get FourCC code as a string.
  27. int GetFourccName(char* strbuf, uint32_t ulen, uint32_t fourcc)
  28. {
  29. int iret = -1;
  30. if (NULL == strbuf) {
  31. return iret;
  32. }
  33. for (uint32_t i = 0; i < sizeof(uint32_t) && i < ulen; i++) {
  34. uint32_t uindex = i * 8;
  35. strbuf[i] = (fourcc >> uindex) & 0xFF;
  36. }
  37. iret = 0;
  38. return iret;
  39. }
  40. VideoCaptureImpl::VideoCaptureImpl(videocap_callback_t* pCallback)
  41. {
  42. memcpy(&m_callback, pCallback, sizeof(videocap_callback_t));
  43. m_capture = NULL;
  44. m_bCaptureStarted = false;
  45. m_deviceId = -1;
  46. m_deviceFd = -1;
  47. m_in_cap_width = 0;
  48. m_in_cap_height = 0;
  49. m_real_cap_width = 0;
  50. m_real_cap_height = 0;
  51. m_out_cap_width = 0;
  52. m_out_cap_height = 0;
  53. m_rotate = libyuv::kRotate0;
  54. m_frame_fmt = VIDEO_FORMAT_I420;
  55. m_captureVideoType = VideoType::kI420;
  56. m_currentFrameRate = -1;
  57. m_buffersAllocatedByDevice = -1;
  58. m_pool = NULL;
  59. m_CaptureThreadId = 0;
  60. m_bStopCapture = false;
  61. m_i420 = NULL;
  62. m_opti420 = NULL;
  63. m_rgb24 = NULL;
  64. m_iminbrightness = 0;
  65. m_imaxbrightness = 0;
  66. m_ilogcount = 0;
  67. }
  68. VideoCaptureImpl::~VideoCaptureImpl()
  69. {
  70. m_ilogcount = 0;
  71. m_bCaptureStarted = false;
  72. m_bStopCapture = false;
  73. StopVideoCapture();
  74. if (m_deviceFd != -1) {
  75. close(m_deviceFd);
  76. }
  77. if (NULL != m_capture){
  78. free(m_capture);
  79. m_capture = NULL;
  80. }
  81. }
  82. int VideoCaptureImpl::VideoCaptureSetParam(videocap_param_t* param)
  83. {
  84. /* check param */
  85. if (NULL == param) {
  86. return -1;
  87. }
  88. if (param->cap_mode < 0 || param->cap_mode >= VIDEOCAP_MAX_MODE) {
  89. return -1;
  90. }
  91. if (param->frame_fmt != VIDEO_FORMAT_I420 && param->frame_fmt != VIDEO_FORMAT_RGB24) {
  92. return -1;
  93. }
  94. if (param->fps < 1.0 || param->fps > 50.0) {
  95. return -1;
  96. }
  97. if (param->pre_hwnd){
  98. if (param->pre_width < 0 || param->pre_height < 0) {
  99. return -1;
  100. }
  101. }
  102. if (param->dev_id >= 0) {
  103. m_deviceId = param->dev_id;
  104. }
  105. else {
  106. return -1;
  107. }
  108. if (param->frame_fmt == VIDEO_FORMAT_I420 && !(param->option & VIDEOCAP_OPT_EANBLE_RESIZE)) {
  109. param->res_mode = param->cap_mode;
  110. param->option |= VIDEOCAP_OPT_EANBLE_RESIZE;
  111. }
  112. if (param->option & VIDEOCAP_OPT_EANBLE_RESIZE) {
  113. if (param->res_mode < VIDEOCAP_FRAME_SQCIF || param->res_mode > VIDEOCAP_FRAME_SVGA) {
  114. return -1;
  115. }
  116. }
  117. else {
  118. //CapLog("%s", "param->option & VIDEOCAP_OPT_EANBLE_RESIZE success.");
  119. }
  120. m_capture = (videocap_t*)malloc(sizeof(videocap_t));
  121. if (!m_capture) {
  122. return -1;
  123. }
  124. memset((void*)m_capture, 0, sizeof(videocap_t));
  125. memcpy(&m_capture->param, param, sizeof(videocap_param_t));
  126. if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) {
  127. int width = mode_width[param->cap_mode];
  128. int height = mode_height[param->cap_mode];
  129. if (video_frame_alloc(width, height, param->frame_fmt, &m_capture->cap_frame) != 0) {
  130. free(m_capture);
  131. return -1;
  132. }
  133. video_frame_fill_black(&m_capture->cap_frame);
  134. }
  135. if (param->option & VIDEOCAP_OPT_ENABLE_ASYNC_GRAB) {
  136. }
  137. if (param->option & VIDEOCAP_OPT_EANBLE_RESIZE) {
  138. int width = mode_width[param->res_mode];
  139. int height = mode_height[param->res_mode];
  140. if (video_frame_alloc(width, height, param->frame_fmt, &m_capture->res_frame) != 0) {
  141. if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) {
  142. video_frame_free(&m_capture->res_frame);
  143. }
  144. free(m_capture);
  145. return -1;
  146. }
  147. video_frame_fill_black(&m_capture->res_frame);
  148. m_capture->sws_context = sws_getContext(mode_width[param->cap_mode],
  149. mode_height[param->cap_mode],
  150. AV_PIX_FMT_BGR24,
  151. mode_width[param->res_mode],
  152. mode_height[param->res_mode],
  153. m_capture->param.frame_fmt == VIDEO_FORMAT_RGB24 ? AV_PIX_FMT_BGR24 : AV_PIX_FMT_YUV420P,
  154. SWS_FAST_BILINEAR,
  155. NULL,
  156. NULL,
  157. NULL);
  158. if (!m_capture->sws_context) {
  159. video_frame_free(&m_capture->res_frame);
  160. if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) {
  161. video_frame_free(&m_capture->cap_frame);
  162. }
  163. free(m_capture);
  164. return -1;
  165. }
  166. }
  167. m_rotate = RotateTrans(param->irotate);
  168. m_in_cap_width = m_out_cap_width = mode_width[m_capture->param.cap_mode];
  169. m_in_cap_height = m_out_cap_height = mode_height[m_capture->param.cap_mode];
  170. if (libyuv::kRotate90 == m_rotate || libyuv::kRotate270 == m_rotate){
  171. m_out_cap_width = mode_height[m_capture->param.cap_mode];
  172. m_out_cap_height = mode_width[m_capture->param.cap_mode];
  173. }
  174. return 0;
  175. }
  176. int ConvertVideoType(VideoType video_type) {
  177. switch (video_type) {
  178. case VideoType::kUnknown:
  179. return libyuv::FOURCC_ANY;
  180. case VideoType::kI420:
  181. return libyuv::FOURCC_I420;
  182. case VideoType::kIYUV: // same as VideoType::kYV12
  183. case VideoType::kYV12:
  184. return libyuv::FOURCC_YV12;
  185. case VideoType::kRGB24:
  186. return libyuv::FOURCC_24BG;
  187. case VideoType::kABGR:
  188. return libyuv::FOURCC_ABGR;
  189. case VideoType::kRGB565:
  190. return libyuv::FOURCC_RGBP;
  191. case VideoType::kYUY2:
  192. return libyuv::FOURCC_YUY2;
  193. case VideoType::kUYVY:
  194. return libyuv::FOURCC_UYVY;
  195. case VideoType::kMJPEG:
  196. return libyuv::FOURCC_MJPG;
  197. case VideoType::kNV21:
  198. return libyuv::FOURCC_NV21;
  199. case VideoType::kNV12:
  200. return libyuv::FOURCC_NV12;
  201. case VideoType::kARGB:
  202. return libyuv::FOURCC_ARGB;
  203. case VideoType::kBGRA:
  204. return libyuv::FOURCC_BGRA;
  205. case VideoType::kARGB4444:
  206. return libyuv::FOURCC_R444;
  207. case VideoType::kARGB1555:
  208. return libyuv::FOURCC_RGBO;
  209. }
  210. return libyuv::FOURCC_ANY;
  211. }
  212. size_t CalcBufferSize(VideoType type, int width, int height)
  213. {
  214. size_t buffer_size = 0;
  215. switch (type) {
  216. case VideoType::kI420:
  217. case VideoType::kNV12:
  218. case VideoType::kNV21:
  219. case VideoType::kIYUV:
  220. case VideoType::kYV12: {
  221. int half_width = (width + 1) >> 1;
  222. int half_height = (height + 1) >> 1;
  223. buffer_size = width * height + half_width * half_height * 2;
  224. break;
  225. }
  226. case VideoType::kARGB4444:
  227. case VideoType::kRGB565:
  228. case VideoType::kARGB1555:
  229. case VideoType::kYUY2:
  230. case VideoType::kUYVY:
  231. buffer_size = width * height * 2;
  232. break;
  233. case VideoType::kRGB24:
  234. buffer_size = width * height * 3;
  235. break;
  236. case VideoType::kBGRA:
  237. case VideoType::kARGB:
  238. buffer_size = width * height * 4;
  239. break;
  240. default:
  241. break;
  242. }
  243. return buffer_size;
  244. }
  245. int I420DataSize(int height, int stride_y, int stride_u, int stride_v) {
  246. return stride_y * height + (stride_u + stride_v) * ((height + 1) / 2);
  247. }
  248. int RGB24DataSize(int height, int stride_y, int stride_u, int stride_v) {
  249. return stride_y * height * 2 + ((stride_u + stride_v) * ((height + 1) / 2) * 2);
  250. }
  251. bool CheackRotateParam(int width, int height, libyuv::RotationMode eRotate, int dst_width, int dst_height)
  252. {
  253. bool bret = false;
  254. if (width == dst_width && height == dst_height){
  255. if (libyuv::kRotate0 == eRotate || libyuv::kRotate180 == eRotate){
  256. bret = true;
  257. }
  258. }
  259. else {
  260. if (width == dst_height && height == dst_width){
  261. if (libyuv::kRotate90 == eRotate || libyuv::kRotate270 == eRotate) {
  262. bret = true;
  263. }
  264. }
  265. }
  266. return bret;
  267. }
// Returns the mmap'd V4L2 buffer pool. Owned by this object; NULL until
// AllocateVideoBuffers() has run.
Buffer* VideoCaptureImpl::GetCaptureBuffer()
{
    return m_pool;
}
// Process one raw captured frame: convert it to I420, apply the configured
// rotation, convert the result to RGB24 and deliver it via the on_frame
// callback.
//
// videoFrame / videoFrameLength: raw frame bytes as dequeued from the driver.
// frameInfo: width, height and pixel type of the incoming frame.
// captureTime: capture timestamp (only used by the disabled log line).
// Returns 0 on success, -1 on a length mismatch or conversion failure.
int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
    size_t videoFrameLength,
    const VideoCaptureCapability& frameInfo,
    int64_t captureTime /*=0*/)
{
    const int32_t width = frameInfo.width;
    const int32_t height = frameInfo.height;
    // Emit (currently disabled) diagnostics only for the very first frame.
    if (0 == m_ilogcount){
        //char strmsg[256] = { 0 };
        //snprintf(strmsg, 256, "IncomingFrame capture_time is %d, videoType=%d, rotate=%d, videoFrameLength=%d, width=%d, height=%d, and destination width=%d, height=%d.", captureTime, frameInfo.videoType, m_rotate, videoFrameLength, width, height, m_out_cap_width, m_out_cap_height);
        //CapLogEvent(1, strmsg);
        m_ilogcount++;
    }
    // Not encoded, convert to I420.
    // MJPEG frames are variable-length; every other type must match the
    // computed buffer size exactly.
    if (frameInfo.videoType != VideoType::kMJPEG &&
        CalcBufferSize(frameInfo.videoType, width, abs(height)) != videoFrameLength) {
        CapLog("Wrong incoming frame length.");
        return -1;
    }
    // I420 plane strides for the intermediate buffer (chroma rounded up).
    int stride_y = m_in_cap_width;
    int stride_u = (m_in_cap_width + 1)/2;
    int stride_v = (m_in_cap_width + 1)/2;
    //uint8_t* i420y = (uint8_t*)AlignedMalloc(I420DataSize(height, stride_y, stride_u, stride_v), kBufferAlignment);
    //uint8_t* brg24 = (uint8_t*)AlignedMalloc(RGB24DataSize(m_dest_cap_height, m_dest_cap_width, (m_dest_cap_width+1)/2, (m_dest_cap_width + 1) / 2), kBufferAlignment);
    // NOTE(review): the rotation passed here is hard-coded to kRotate180
    // rather than m_rotate — presumably compensating for a fixed sensor
    // orientation; confirm against the deployment hardware. The vertical
    // crop offset centres the source when its height exceeds m_in_cap_height.
    int conversionResult = libyuv::ConvertToI420(videoFrame, videoFrameLength,
        m_i420,
        stride_y,
        m_i420 + stride_y * m_in_cap_height,
        stride_u,
        m_i420 + stride_y * m_in_cap_height + stride_u * ((m_in_cap_height + 1) / 2),
        stride_v,
        0,
        (height - m_in_cap_height) / 2, // No Cropping
        width,
        height,
        width,
        m_in_cap_height,
        libyuv::kRotate180,
        ConvertVideoType(frameInfo.videoType)
        );
    if (conversionResult < 0) {
        CapLog("Failed to convert capture frame from type %d to I420 for %s.", static_cast<int>(frameInfo.videoType), strerror(errno));
        return -1;
    }
    //{
    // video_frame frmi420 = { 0 };
    // frmi420.data[0] = m_i420;
    // frmi420.linesize[0] = m_in_cap_height * 3 / 2;
    // frmi420.width = m_in_cap_width;
    // frmi420.height = m_in_cap_height;
    // frmi420.format = VIDEO_FORMAT_I420;
    // //m_capture->param.on_frame_i420(m_capture->param.user_data, &frmi420);
    // char stroptname[260] = { 0 };
    // snprintf(stroptname, 260, "%d_%d_%d_%d_i420.bmp", m_ilogcount, (int)m_rotate, m_in_cap_width, m_in_cap_height);
    // video_frame_save_bmpfile(stroptname, &frmi420);
    //}
    // No dimension swap needed: convert the I420 buffer straight to RGB24.
    if (libyuv::kRotate0 == m_rotate || libyuv::kRotate180 == m_rotate){
        conversionResult = libyuv::ConvertFromI420(m_i420,
            stride_y,
            m_i420 + stride_y * m_in_cap_height,
            stride_u,
            m_i420 + stride_y * m_in_cap_height + stride_u * ((m_in_cap_height + 1) / 2),
            stride_v,
            m_rgb24,
            m_out_cap_width * 3,
            m_out_cap_width,
            m_out_cap_height,
            ConvertVideoType(kRGB24));
        if (conversionResult < 0) {
            CapLog("Failed to convert capture frame from I420 to RGB24 for %s.", strerror(errno));
            return -1;
        }
    }
    else {
        // 90/270-degree case: rotate into m_opti420 (swapped dimensions)
        // before the RGB24 conversion.
        if (libyuv::kRotate90 == m_rotate || libyuv::kRotate270 == m_rotate) {
            // NOTE(review): the rotation direction is inverted here (90 -> 270,
            // 270 -> 90); looks deliberate, presumably to combine with the
            // fixed 180-degree flip applied above — confirm.
            libyuv::RotationMode erotate = libyuv::kRotate90;
            if (libyuv::kRotate90 == m_rotate) {
                erotate = libyuv::kRotate270;
            }
            // Strides for the rotated (width/height-swapped) I420 buffer.
            int opt_stride_y = m_out_cap_width;
            int opt_stride_u = (m_out_cap_width + 1) / 2;
            int opt_stride_v = (m_out_cap_width + 1) / 2;
            //uint8_t* iopt420 = (uint8_t*)AlignedMalloc(I420DataSize(m_dest_cap_height, opt_stride_y, opt_stride_u, opt_stride_v), kBufferAlignment);
            int rotateResult = libyuv::I420Rotate(m_i420,
                stride_y,
                m_i420 + stride_y * m_in_cap_height,
                stride_u,
                m_i420 + stride_y * m_in_cap_height + stride_u * ((m_in_cap_height + 1) / 2),
                stride_v,
                m_opti420,
                opt_stride_y,
                m_opti420 + opt_stride_y * m_out_cap_height,
                opt_stride_u,
                m_opti420 + opt_stride_y * m_out_cap_height + opt_stride_u * ((m_out_cap_height + 1) / 2),
                opt_stride_v,
                m_in_cap_width,
                m_in_cap_height,
                erotate);
            if (rotateResult < 0) {
                CapLog("Failed to Rotate Frame %d for %s.", (int)erotate, strerror(errno));
                return -1;
            }
            //{
            // video_frame frmi420 = { 0 };
            // frmi420.data[0] = m_opti420;
            // frmi420.linesize[0] = m_out_cap_width * 3 / 2;
            // frmi420.width = m_out_cap_width;
            // frmi420.height = m_out_cap_height;
            // frmi420.format = VIDEO_FORMAT_I420;
            // //m_capture->param.on_frame_i420(m_capture->param.user_data, &frmi420);
            // char stroptname[260] = { 0 };
            // snprintf(stroptname, 260, "%d_%d_%d_%d_i420.bmp", m_ilogcount, (int)m_rotate, m_out_cap_width, m_out_cap_height);
            // video_frame_save_bmpfile(stroptname, &frmi420);
            //}
            //yu12_to_dib24(brg24, iopt420, m_dest_cap_width, m_dest_cap_height);
            conversionResult = libyuv::ConvertFromI420(m_opti420,
                opt_stride_y,
                m_opti420 + opt_stride_y * m_out_cap_height,
                opt_stride_u,
                m_opti420 + opt_stride_y * m_out_cap_height + opt_stride_u * ((m_out_cap_height + 1) / 2),
                opt_stride_v,
                m_rgb24,
                m_out_cap_width * 3,
                m_out_cap_width,
                m_out_cap_height,
                ConvertVideoType(kRGB24));
            if (conversionResult < 0) {
                CapLog("Failed to convert capture frame from I420 to RGB24 for %s.", strerror(errno));
                return -1;
            }
            //AlignedFree(iopt420);
            //iopt420 = NULL;
        }
    }
    // Hand the finished RGB24 frame to the registered callback, if any.
    if (NULL != m_capture->param.on_frame) {
        video_frame frm = { 0 };
        frm.data[0] = m_rgb24;
        frm.linesize[0] = m_out_cap_width * 3;
        frm.width = m_out_cap_width;
        frm.height = m_out_cap_height;
        frm.format = VIDEO_FORMAT_RGB24;
        m_capture->param.on_frame(m_capture->param.user_data, &frm);
        //char strrgbname[260] = { 0 };
        //snprintf(strrgbname, 260, "%d_%d_%d_%d_rgb.bmp", m_ilogcount, (int)m_rotate, m_out_cap_width, m_out_cap_height);
        //video_frame_save_bmpfile(strrgbname, &frm);
        //m_ilogcount++;
    }
    //AlignedFree(i420y);
    //i420y = NULL;
    //AlignedFree(brg24);
    //brg24 = NULL;
    return 0;
}
  425. static void* VideoCaptureProcess(void *arg)
  426. {
  427. int retVal = 0;
  428. fd_set rSet;
  429. struct timeval timeout;
  430. VideoCaptureImpl* pVideoCapture = (VideoCaptureImpl*)arg;
  431. int iDeviceFd = pVideoCapture->GetCaptureVideoFd();
  432. while (false == pVideoCapture->GetStopCaptureFlag())
  433. {
  434. FD_ZERO(&rSet);
  435. FD_SET(iDeviceFd, &rSet);
  436. timeout.tv_sec = 5;
  437. timeout.tv_usec = 0;
  438. retVal = select(iDeviceFd + 1, &rSet, NULL, NULL, &timeout);
  439. if (retVal < 0 && errno != EINTR) // continue if interrupted
  440. {
  441. // select failed
  442. if (pVideoCapture){
  443. pVideoCapture->CapLog("exit for select failed.");
  444. }
  445. return NULL;
  446. }
  447. else if (retVal == 0) {
  448. // select timed out
  449. if (pVideoCapture){
  450. pVideoCapture->CapLog("exit for select timed out.");
  451. }
  452. return NULL;
  453. }
  454. else if (!FD_ISSET(iDeviceFd, &rSet)) {
  455. // not event on camera handle
  456. if (pVideoCapture){
  457. pVideoCapture->CapLog("exit for not event on camera handle.");
  458. }
  459. return NULL;
  460. }
  461. if (pVideoCapture->VideoCaptureStarted()) {
  462. struct v4l2_buffer buf;
  463. memset(&buf, 0, sizeof(struct v4l2_buffer));
  464. buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  465. buf.memory = V4L2_MEMORY_MMAP;
  466. // dequeue a buffer - repeat until dequeued properly!
  467. while (ioctl(iDeviceFd, VIDIOC_DQBUF, &buf) < 0) {
  468. if (errno != EINTR) {
  469. if (pVideoCapture){
  470. pVideoCapture->CapLog("could not sync on a buffer on device %s.", strerror(errno));
  471. }
  472. return NULL;
  473. }
  474. }
  475. VideoCaptureCapability frameInfo;
  476. frameInfo.width = pVideoCapture->GetCapture_Width();
  477. frameInfo.height = pVideoCapture->GetCapture_Height();
  478. frameInfo.videoType = pVideoCapture->GetCaptureVideoType();
  479. //// convert to to I420 if needed
  480. Buffer* buffer_pool = pVideoCapture->GetCaptureBuffer();
  481. pVideoCapture->IncomingFrame((unsigned char*)buffer_pool[buf.index].start, buf.length, frameInfo);
  482. // enqueue the buffer again
  483. if (ioctl(iDeviceFd, VIDIOC_QBUF, &buf) == -1) {
  484. if (pVideoCapture){
  485. pVideoCapture->CapLog("Failed to enqueue capture buffer");
  486. }
  487. }
  488. }
  489. }
  490. usleep(0);
  491. return NULL;
  492. }
  493. int VideoCaptureImpl::StartVideoCapture()
  494. {
  495. if (m_bCaptureStarted){
  496. if (m_real_cap_width == mode_width[m_capture->param.cap_mode] &&
  497. m_real_cap_height == mode_height[m_capture->param.cap_mode] &&
  498. m_frame_fmt == m_capture->param.frame_fmt){
  499. return 0;
  500. }
  501. else {
  502. StopVideoCapture();
  503. }
  504. }
  505. // first open /dev/video device
  506. char device[20] = {0};
  507. snprintf(device, 20,"/dev/video%d", (int)m_deviceId);
  508. if ((m_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) {
  509. CapLog("error in opening %s for %s.", device, strerror(errno));
  510. return -1;
  511. }
  512. // Supported video formats in preferred order.
  513. // If the requested resolution is larger than VGA, we prefer MJPEG. Go for
  514. // I420 otherwise.
  515. const int nFormats = 5;
  516. unsigned int fmts[nFormats];
  517. if (mode_width[m_capture->param.cap_mode] > 640 || mode_height[m_capture->param.cap_mode] > 480) {
  518. fmts[0] = V4L2_PIX_FMT_MJPEG;
  519. fmts[1] = V4L2_PIX_FMT_YUV420;
  520. fmts[2] = V4L2_PIX_FMT_YUYV;
  521. fmts[3] = V4L2_PIX_FMT_UYVY;
  522. fmts[4] = V4L2_PIX_FMT_JPEG;
  523. }
  524. else {
  525. fmts[0] = V4L2_PIX_FMT_YUV420;
  526. fmts[1] = V4L2_PIX_FMT_YUYV;
  527. fmts[2] = V4L2_PIX_FMT_UYVY;
  528. fmts[3] = V4L2_PIX_FMT_MJPEG;
  529. fmts[4] = V4L2_PIX_FMT_JPEG;
  530. }
  531. // Enumerate image formats.
  532. struct v4l2_fmtdesc fmt;
  533. int fmtsIdx = nFormats;
  534. memset(&fmt, 0, sizeof(fmt));
  535. fmt.index = 0;
  536. fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  537. //CapLog("Video Capture enumerates supported image formats:");
  538. while (ioctl(m_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) {
  539. char strformat[32] = { 0 };
  540. GetFourccName(strformat, 32, fmt.pixelformat);
  541. //CapLog("pixelformat=%s, description='%s'", strformat, fmt.description);
  542. // Match the preferred order.
  543. for (int i = 0; i < nFormats; i++) {
  544. if (fmt.pixelformat == fmts[i] && i < fmtsIdx)
  545. fmtsIdx = i;
  546. }
  547. // Keep enumerating.
  548. fmt.index++;
  549. }
  550. if (fmtsIdx == nFormats) {
  551. CapLog("no supporting video formats found");
  552. close(m_deviceFd);
  553. return -1;
  554. }
  555. else {
  556. char strformat[32] = { 0 };
  557. GetFourccName(strformat, 32, fmts[fmtsIdx]);
  558. //char strmsg[256] = { 0 };
  559. //snprintf(strmsg, 256, "we prefer format %s.", strformat);
  560. //CapLogEvent(1, strmsg);
  561. }
  562. struct v4l2_format video_fmt;
  563. memset(&video_fmt, 0, sizeof(v4l2_format));
  564. video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  565. video_fmt.fmt.pix.field = V4L2_FIELD_ANY;
  566. video_fmt.fmt.pix.width = mode_width[m_capture->param.cap_mode];
  567. video_fmt.fmt.pix.height = mode_height[m_capture->param.cap_mode];
  568. video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx];
  569. //CapLog("video_fmt.fmt.pix.width = %d, video_fmt.fmt.pix.height = %d.", video_fmt.fmt.pix.width, video_fmt.fmt.pix.height);
  570. if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
  571. m_captureVideoType = VideoType::kYUY2;
  572. else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
  573. m_captureVideoType = VideoType::kI420;
  574. else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY)
  575. m_captureVideoType = VideoType::kUYVY;
  576. else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG ||
  577. video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG)
  578. m_captureVideoType = VideoType::kMJPEG;
  579. // set format and frame size now
  580. if (ioctl(m_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) {
  581. CapLog("error in VIDIOC_S_FMT for %s.", strerror(errno));
  582. close(m_deviceFd);
  583. return -1;
  584. }
  585. else
  586. {
  587. if (ioctl(m_deviceFd, VIDIOC_G_FMT, &video_fmt) < 0){
  588. CapLog("error in VIDIOC_G_FMT for %s.", strerror(errno));
  589. close(m_deviceFd);
  590. return -1;
  591. }
  592. else
  593. {
  594. // initialize current width and height
  595. m_real_cap_width = video_fmt.fmt.pix.width;
  596. m_real_cap_height = video_fmt.fmt.pix.height;
  597. //CapLog("real camera capture m_capture_width = %d, m_capture_height = %d.", m_real_cap_width, m_real_cap_height);
  598. }
  599. }
  600. // Trying to set frame rate, before check driver capability.
  601. bool driver_framerate_support = true;
  602. struct v4l2_streamparm streamparms;
  603. memset(&streamparms, 0, sizeof(streamparms));
  604. streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  605. if (ioctl(m_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) {
  606. CapLog("error in VIDIOC_G_PARM,and error info is %s.", strerror(errno));
  607. driver_framerate_support = false;
  608. // continue
  609. }
  610. else {
  611. // check the capability flag is set to V4L2_CAP_TIMEPERFRAME.
  612. if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
  613. // driver supports the feature. Set required framerate.
  614. memset(&streamparms, 0, sizeof(streamparms));
  615. streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  616. streamparms.parm.capture.timeperframe.numerator = 1;
  617. streamparms.parm.capture.timeperframe.denominator = (int32_t)m_capture->param.fps;
  618. if (ioctl(m_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) {
  619. CapLog("Failed to set the framerate. error info is %s.", strerror(errno));
  620. driver_framerate_support = false;
  621. }
  622. else {
  623. m_currentFrameRate = (int32_t)m_capture->param.fps;
  624. //char strframerate[256] = { 0 };
  625. //snprintf(strframerate, 256, "Set Camera video capture rate to %d, and numerator is %d, denominator is %d.", m_currentFrameRate, streamparms.parm.capture.timeperframe.numerator, streamparms.parm.capture.timeperframe.denominator);
  626. //CapLogEvent(0, strframerate);
  627. if (ioctl(m_deviceFd, VIDIOC_G_PARM, &streamparms) == 0) {
  628. //char stroutrate[256] = { 0 };
  629. //snprintf(stroutrate, 256, "Get video capture numerator is %d, denominator is %d.", streamparms.parm.capture.timeperframe.numerator, streamparms.parm.capture.timeperframe.denominator);
  630. //CapLogEvent(1, stroutrate);
  631. }
  632. }
  633. }
  634. }
  635. // If driver doesn't support framerate control, need to hardcode.
  636. // Hardcoding the value based on the frame size.
  637. if (!driver_framerate_support) {
  638. if (m_in_cap_width >= 800 && m_captureVideoType != VideoType::kMJPEG) {
  639. m_currentFrameRate = 15;
  640. }
  641. else {
  642. m_currentFrameRate = 5;
  643. //CapLog("The Camera not support set video capture framerate, set capture rate to %d.", m_currentFrameRate);
  644. }
  645. }
  646. if (false == GetCamBrightnessInfo()) {
  647. close(m_deviceFd);
  648. return -1;
  649. }
  650. if (!AllocateVideoCapturebuffer()) {
  651. CapLog("failed to allocate video capture buffers");
  652. close(m_deviceFd);
  653. return -1;
  654. }
  655. if (-1 == pthread_create(&m_CaptureThreadId, NULL, VideoCaptureProcess, this)) {
  656. CapLog("Create Video Capture Thread Failed!");
  657. close(m_deviceFd);
  658. return -1;
  659. }
  660. // Needed to start UVC camera - from the uvcview application
  661. enum v4l2_buf_type type;
  662. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  663. if (ioctl(m_deviceFd, VIDIOC_STREAMON, &type) == -1) {
  664. CapLog("failed to turn on stream for %s.", strerror(errno));
  665. close(m_deviceFd);
  666. return -1;
  667. }
  668. m_bCaptureStarted = true;
  669. return 0;
  670. }
// Allocate everything the capture pipeline needs: the driver's mmap'd V4L2
// buffers plus our aligned user-space conversion buffers.
bool VideoCaptureImpl::AllocateVideoCapturebuffer()
{
    return AllocateVideoBuffers() && AlignedMallocVideoBuffer();
}
  675. //critical section protected by the caller
  676. bool VideoCaptureImpl::AllocateVideoBuffers()
  677. {
  678. struct v4l2_requestbuffers rbuffer;
  679. memset(&rbuffer, 0, sizeof(v4l2_requestbuffers));
  680. rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; //缓冲帧数据格式
  681. rbuffer.memory = V4L2_MEMORY_MMAP; //是内存映射还是用户指针方式
  682. rbuffer.count = kNoOfV4L2Bufffers; //缓冲区缓冲帧的数目
  683. //向设备申请缓冲区
  684. if (ioctl(m_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0){
  685. CapLog("Could not get buffers from device for %s.", strerror(errno));
  686. return false;
  687. }
  688. if (rbuffer.count > kNoOfV4L2Bufffers) {
  689. rbuffer.count = kNoOfV4L2Bufffers;
  690. }
  691. m_buffersAllocatedByDevice = rbuffer.count;
  692. //Map the buffers
  693. m_pool = new Buffer[rbuffer.count];
  694. for (unsigned int i = 0; i < rbuffer.count; i++)
  695. {
  696. struct v4l2_buffer buffer;
  697. memset(&buffer, 0, sizeof(v4l2_buffer));
  698. buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  699. buffer.memory = V4L2_MEMORY_MMAP;
  700. buffer.index = i;
  701. //获取缓冲帧的地址,长度
  702. if (ioctl(m_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0){
  703. return false;
  704. }
  705. m_pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, m_deviceFd, buffer.m.offset);
  706. if (MAP_FAILED == m_pool[i].start){
  707. for (unsigned int j = 0; j < i; j++)
  708. munmap(m_pool[j].start, m_pool[j].length);
  709. return false;
  710. }
  711. m_pool[i].length = buffer.length;
  712. if (ioctl(m_deviceFd, VIDIOC_QBUF, &buffer) < 0){
  713. return false;
  714. }
  715. }
  716. return true;
  717. }
  718. bool VideoCaptureImpl::DeAllocateVideoBuffers()
  719. {
  720. // unmap buffers
  721. for (int i = 0; i < m_buffersAllocatedByDevice; i++) {
  722. munmap(m_pool[i].start, m_pool[i].length);
  723. }
  724. delete[] m_pool;
  725. // turn off stream
  726. enum v4l2_buf_type type;
  727. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  728. if (ioctl(m_deviceFd, VIDIOC_STREAMOFF, &type) < 0){
  729. CapLog("VIDIOC_STREAMOFF error. error no: %d", errno);
  730. }
  731. return true;
  732. }
  733. bool VideoCaptureImpl::AlignedMallocVideoBuffer()
  734. {
  735. bool bret = false;
  736. int stride_y = m_in_cap_width;
  737. int stride_u = (m_in_cap_width + 1) / 2;
  738. int stride_v = (m_in_cap_width + 1) / 2;
  739. m_i420 = (uint8_t*)AlignedMalloc(I420DataSize(m_in_cap_height, stride_y, stride_u, stride_v), kBufferAlignment);
  740. m_rgb24 = (uint8_t*)AlignedMalloc(RGB24DataSize(m_out_cap_height, m_out_cap_width, (m_out_cap_width + 1) / 2, (m_out_cap_width + 1) / 2), kBufferAlignment);
  741. int opt_stride_y = m_out_cap_width;
  742. int opt_stride_u = (m_out_cap_width + 1) / 2;
  743. int opt_stride_v = (m_out_cap_width + 1) / 2;
  744. m_opti420 = (uint8_t*)AlignedMalloc(I420DataSize(m_out_cap_height, opt_stride_y, opt_stride_u, opt_stride_v), kBufferAlignment);
  745. if (m_i420 && m_rgb24 && m_opti420){
  746. bret = true;
  747. }
  748. return bret;
  749. }
  750. bool VideoCaptureImpl::FreeAlignedMallocVideoBuffer()
  751. {
  752. if (NULL != m_i420){
  753. AlignedFree(m_i420);
  754. m_i420 = NULL;
  755. }
  756. if (NULL != m_rgb24) {
  757. AlignedFree(m_rgb24);
  758. m_rgb24 = NULL;
  759. }
  760. if (NULL != m_opti420) {
  761. AlignedFree(m_opti420);
  762. m_opti420 = NULL;
  763. }
  764. return true;
  765. }
// Returns true while capture is active (cleared by StopVideoCapture).
bool VideoCaptureImpl::VideoCaptureStarted()
{
return m_bCaptureStarted;
}
// Returns the open V4L2 device file descriptor (-1 after StopVideoCapture).
int VideoCaptureImpl::GetCaptureVideoFd()
{
return m_deviceFd;
}
// Returns the pixel format the device is capturing in.
VideoType VideoCaptureImpl::GetCaptureVideoType()
{
return m_captureVideoType;
}
// Returns the actual capture width in use (presumably the width negotiated
// with the driver, which may differ from the requested one — set elsewhere).
int VideoCaptureImpl::GetCapture_Width()
{
return m_real_cap_width;
}
// Returns the actual capture height in use (counterpart of GetCapture_Width).
int VideoCaptureImpl::GetCapture_Height()
{
return m_real_cap_height;
}
// Returns the stop flag polled by the capture thread (set by StopVideoCapture).
bool VideoCaptureImpl::GetStopCaptureFlag()
{
return m_bStopCapture;
}
  790. int VideoCaptureImpl::StopVideoCapture()
  791. {
  792. if (m_bCaptureStarted){
  793. m_bCaptureStarted = false;
  794. m_bStopCapture = true;
  795. if (0 == pthread_join(m_CaptureThreadId, NULL)) {
  796. m_CaptureThreadId = 0;
  797. CapLog("thread join video capture thread success.");
  798. }
  799. else {
  800. CapLog("thread join video capture thread failed for %s.", strerror(errno));
  801. }
  802. DeAllocateVideoBuffers();
  803. FreeAlignedMallocVideoBuffer();
  804. close(m_deviceFd);
  805. m_deviceFd = -1;
  806. CapLog("video capture has stopped!");
  807. }
  808. return 0;
  809. }
// Self-destructs the instance. Callers must not touch the object afterwards;
// the object must have been heap-allocated with new for this to be valid.
void VideoCaptureImpl::VideoCaptureDestroy()
{
delete this;
}
  814. int VideoCaptureImpl::GetCamBrightness(int* ibright, bool bRawRange)
  815. {
  816. int iret = -1;
  817. struct v4l2_control ctrl;
  818. ctrl.id = V4L2_CID_BRIGHTNESS;
  819. if (ioctl(m_deviceFd,VIDIOC_G_CTRL,&ctrl) == -1){
  820. CapLog("VIDIOC_S_CTRL get V4L2_CID_BRIGHTNESS error for %s", strerror(errno));
  821. }
  822. else {
  823. if (bRawRange) {
  824. *ibright = ctrl.value;
  825. }
  826. else {
  827. *ibright = TransFromRealBrightnessValue(ctrl.value);
  828. }
  829. iret = 0;
  830. }
  831. return iret;
  832. }
  833. int VideoCaptureImpl::SetCamBrightness(int ibright, bool bRawRange)
  834. {
  835. int iret = -1;
  836. struct v4l2_control ctrl;
  837. ctrl.id = V4L2_CID_BRIGHTNESS;
  838. if (bRawRange) {
  839. ctrl.value = ibright;
  840. }
  841. else {
  842. ctrl.value = TransToRealBrightnessValue(ibright);
  843. }
  844. if (ioctl(m_deviceFd, VIDIOC_S_CTRL, &ctrl) == -1){
  845. CapLog("VIDIOC_S_CTRL set V4L2_CID_BRIGHTNESS error for %s.", strerror(errno));
  846. }
  847. else{
  848. iret = 0;
  849. }
  850. return iret;
  851. }
  852. int VideoCaptureImpl::SetCamAutoBrightness()
  853. {
  854. int iret = -1;
  855. struct v4l2_control ctrl;
  856. ctrl.id = V4L2_CID_BRIGHTNESS;
  857. ctrl.value = m_idefaultbrightness;
  858. if (ioctl(m_deviceFd, VIDIOC_S_CTRL, &ctrl) == -1) {
  859. CapLog("VIDIOC_S_CTRL set V4L2_CID_AUTOBRIGHTNESS error for %s", strerror(errno));
  860. }
  861. else {
  862. iret = 0;
  863. }
  864. iret = 0;
  865. return iret;
  866. }
  867. bool VideoCaptureImpl::GetCamBrightnessInfo()
  868. {
  869. bool bret = false;
  870. struct v4l2_queryctrl qctrl;
  871. qctrl.id = V4L2_CID_BRIGHTNESS;
  872. if (ioctl(m_deviceFd, VIDIOC_QUERYCTRL, &qctrl) == -1) {
  873. CapLog("VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS error for %s", strerror(errno));
  874. }
  875. else {
  876. //CapLog("VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS success {min(%d) - max(%d)},default is %d", qctrl.minimum, qctrl.maximum, qctrl.default_value);
  877. m_idefaultbrightness = qctrl.default_value;
  878. m_iminbrightness = qctrl.minimum;
  879. m_imaxbrightness = qctrl.maximum;
  880. bret = true;
  881. }
  882. return bret;
  883. }
  884. bool VideoCaptureImpl::GetCamRawBrightnessRange(int* imin, int* imax)
  885. {
  886. bool bret = false;
  887. struct v4l2_queryctrl qctrl;
  888. qctrl.id = V4L2_CID_BRIGHTNESS;
  889. if (ioctl(m_deviceFd, VIDIOC_QUERYCTRL, &qctrl) == -1) {
  890. CapLog("VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS error for %s", strerror(errno));
  891. }
  892. else {
  893. CapLog("VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS success {min(%d) - max(%d)}, default is %d.", qctrl.minimum, qctrl.maximum, qctrl.default_value);
  894. *imin = qctrl.minimum;
  895. *imax = qctrl.maximum;
  896. bret = true;
  897. }
  898. return bret;
  899. }
  900. //100 to real brightness value
  901. int VideoCaptureImpl::TransToRealBrightnessValue(int ibright)
  902. {
  903. float fvalue = ibright * (m_imaxbrightness - m_iminbrightness) / 10;
  904. int ivalue = fvalue;
  905. int ilast = ivalue % 10;
  906. int inum = ivalue / 10;
  907. if (ilast >= 5) {
  908. inum++;
  909. }
  910. inum += m_iminbrightness;
  911. if (inum < m_iminbrightness){
  912. inum = m_iminbrightness;
  913. }
  914. if (inum > m_imaxbrightness){
  915. inum = m_imaxbrightness;
  916. }
  917. return inum;
  918. }
  919. //real brightness value to [0-100]
  920. int VideoCaptureImpl::TransFromRealBrightnessValue(int ibright)
  921. {
  922. int itotal = m_imaxbrightness - m_iminbrightness;
  923. int ivalue = ibright - m_iminbrightness;
  924. float fvalue = ivalue * 1000 / itotal;
  925. ivalue = fvalue;
  926. int ilast = ivalue % 10;
  927. int inum = ivalue / 10;
  928. if (ilast >= 5) {
  929. inum++;
  930. }
  931. return inum;
  932. }
  933. libyuv::RotationMode VideoCaptureImpl::RotateTrans(int irotate)
  934. {
  935. libyuv::RotationMode rotation_mode = libyuv::kRotate0;
  936. switch (irotate) {
  937. case 0:
  938. rotation_mode = libyuv::kRotate0;
  939. break;
  940. case 90:
  941. rotation_mode = libyuv::kRotate90;
  942. break;
  943. case 180:
  944. rotation_mode = libyuv::kRotate180;
  945. break;
  946. case 270:
  947. rotation_mode = libyuv::kRotate270;
  948. break;
  949. }
  950. return rotation_mode;
  951. }
// printf-style debug logging hook: forwards the format string and argument
// list to the user-registered debug callback, if any.
// NOTE(review): the va_list is passed straight through as the callback's third
// argument — m_callback.debug must have a vprintf-style signature
// (user_data, fmt, va_list); confirm against the callback declaration.
void VideoCaptureImpl::CapLog(const char* fmt, ...)
{
if (m_callback.debug) {
va_list arg;
va_start(arg, fmt);
(*m_callback.debug)(m_callback.user_data, fmt, arg);
va_end(arg);
}
}
  961. void VideoCaptureImpl::CapLogEvent(int itype, const char* strmessage)
  962. {
  963. if (m_callback.logevent) {
  964. (*m_callback.logevent)(itype, strmessage);
  965. }
  966. }