#include"videocapture_linux.h" #include "../../libvideoframework/videoutil.h" #include "../../libvideoframework/aligned_malloc.h" #include #include #include #include #include #include #include #include #include #include #include #include #ifdef __cplusplus extern "C" { #endif #include #include #include #ifdef __cplusplus } #endif static const int kBufferAlignment = 64; // Get FourCC code as a string. int GetFourccName(char* strbuf, uint32_t ulen, uint32_t fourcc) { int iret = -1; if (NULL == strbuf) { return iret; } for (uint32_t i = 0; i < sizeof(uint32_t) && i < ulen; i++) { uint32_t uindex = i * 8; strbuf[i] = (fourcc >> uindex) & 0xFF; } iret = 0; return iret; } VideoCaptureImpl::VideoCaptureImpl(videocap_callback_t* pCallback) { memcpy(&m_callback, pCallback, sizeof(videocap_callback_t)); m_capture = NULL; m_bCaptureStarted = false; m_deviceId = -1; m_deviceFd = -1; m_in_cap_width = 0; m_in_cap_height = 0; m_real_cap_width = 0; m_real_cap_height = 0; m_out_cap_width = 0; m_out_cap_height = 0; m_rotate = libyuv::kRotate0; m_frame_fmt = VIDEO_FORMAT_I420; m_captureVideoType = VideoType::kI420; m_currentFrameRate = -1; m_buffersAllocatedByDevice = -1; m_pool = NULL; m_CaptureThreadId = 0; m_bStopCapture = false; m_i420 = NULL; m_opti420 = NULL; m_rgb24 = NULL; m_iminbrightness = 0; m_imaxbrightness = 0; m_ilogcount = 0; } VideoCaptureImpl::~VideoCaptureImpl() { m_ilogcount = 0; m_bCaptureStarted = false; m_bStopCapture = false; StopVideoCapture(); if (m_deviceFd != -1) { close(m_deviceFd); } if (NULL != m_capture){ free(m_capture); m_capture = NULL; } } int VideoCaptureImpl::VideoCaptureSetParam(videocap_param_t* param) { /* check param */ if (NULL == param) { return -1; } if (param->cap_mode < 0 || param->cap_mode >= VIDEOCAP_MAX_MODE) { return -1; } if (param->frame_fmt != VIDEO_FORMAT_I420 && param->frame_fmt != VIDEO_FORMAT_RGB24) { return -1; } if (param->fps < 1.0 || param->fps > 50.0) { return -1; } if (param->pre_hwnd){ if (param->pre_width < 0 || param->pre_height < 0) { return -1; } } if (param->dev_id >= 0) { m_deviceId = param->dev_id; } else { return -1; } if (param->frame_fmt == VIDEO_FORMAT_I420 && !(param->option & VIDEOCAP_OPT_EANBLE_RESIZE)) { param->res_mode = param->cap_mode; param->option |= VIDEOCAP_OPT_EANBLE_RESIZE; } if (param->option & VIDEOCAP_OPT_EANBLE_RESIZE) { if (param->res_mode < VIDEOCAP_FRAME_SQCIF || param->res_mode > VIDEOCAP_FRAME_SVGA) { return -1; } } else { //CapLog("%s", "param->option & VIDEOCAP_OPT_EANBLE_RESIZE success."); } m_capture = (videocap_t*)malloc(sizeof(videocap_t)); if (!m_capture) { return -1; } memset((void*)m_capture, 0, sizeof(videocap_t)); memcpy(&m_capture->param, param, sizeof(videocap_param_t)); if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) { int width = mode_width[param->cap_mode]; int height = mode_height[param->cap_mode]; if (video_frame_alloc(width, height, param->frame_fmt, &m_capture->cap_frame) != 0) { free(m_capture); return -1; } video_frame_fill_black(&m_capture->cap_frame); } if (param->option & VIDEOCAP_OPT_ENABLE_ASYNC_GRAB) { } if (param->option & VIDEOCAP_OPT_EANBLE_RESIZE) { int width = mode_width[param->res_mode]; int height = mode_height[param->res_mode]; if (video_frame_alloc(width, height, param->frame_fmt, &m_capture->res_frame) != 0) { if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) { video_frame_free(&m_capture->res_frame); } free(m_capture); return -1; } video_frame_fill_black(&m_capture->res_frame); m_capture->sws_context = sws_getContext(mode_width[param->cap_mode], mode_height[param->cap_mode], 
int VideoCaptureImpl::VideoCaptureSetParam(videocap_param_t* param) {
    /* check param */
    if (NULL == param) {
        return -1;
    }
    if (param->cap_mode < 0 || param->cap_mode >= VIDEOCAP_MAX_MODE) {
        return -1;
    }
    if (param->frame_fmt != VIDEO_FORMAT_I420 && param->frame_fmt != VIDEO_FORMAT_RGB24) {
        return -1;
    }
    if (param->fps < 1.0 || param->fps > 50.0) {
        return -1;
    }
    if (param->pre_hwnd) {
        if (param->pre_width < 0 || param->pre_height < 0) {
            return -1;
        }
    }
    if (param->dev_id >= 0) {
        m_deviceId = param->dev_id;
    } else {
        return -1;
    }
    // I420 output always goes through the resize path.
    if (param->frame_fmt == VIDEO_FORMAT_I420 && !(param->option & VIDEOCAP_OPT_EANBLE_RESIZE)) {
        param->res_mode = param->cap_mode;
        param->option |= VIDEOCAP_OPT_EANBLE_RESIZE;
    }
    if (param->option & VIDEOCAP_OPT_EANBLE_RESIZE) {
        if (param->res_mode < VIDEOCAP_FRAME_SQCIF || param->res_mode > VIDEOCAP_FRAME_SVGA) {
            return -1;
        }
    } else {
        //CapLog("%s", "param->option & VIDEOCAP_OPT_EANBLE_RESIZE success.");
    }

    m_capture = (videocap_t*)malloc(sizeof(videocap_t));
    if (!m_capture) {
        return -1;
    }
    memset((void*)m_capture, 0, sizeof(videocap_t));
    memcpy(&m_capture->param, param, sizeof(videocap_param_t));

    if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) {
        int width = mode_width[param->cap_mode];
        int height = mode_height[param->cap_mode];
        if (video_frame_alloc(width, height, param->frame_fmt, &m_capture->cap_frame) != 0) {
            free(m_capture);
            m_capture = NULL;
            return -1;
        }
        video_frame_fill_black(&m_capture->cap_frame);
    }
    if (param->option & VIDEOCAP_OPT_ENABLE_ASYNC_GRAB) {
        // async grab is not implemented here
    }
    if (param->option & VIDEOCAP_OPT_EANBLE_RESIZE) {
        int width = mode_width[param->res_mode];
        int height = mode_height[param->res_mode];
        if (video_frame_alloc(width, height, param->frame_fmt, &m_capture->res_frame) != 0) {
            if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) {
                video_frame_free(&m_capture->cap_frame);  // res_frame was never allocated
            }
            free(m_capture);
            m_capture = NULL;
            return -1;
        }
        video_frame_fill_black(&m_capture->res_frame);
        m_capture->sws_context = sws_getContext(
            mode_width[param->cap_mode], mode_height[param->cap_mode], AV_PIX_FMT_BGR24,
            mode_width[param->res_mode], mode_height[param->res_mode],
            m_capture->param.frame_fmt == VIDEO_FORMAT_RGB24 ? AV_PIX_FMT_BGR24 : AV_PIX_FMT_YUV420P,
            SWS_FAST_BILINEAR, NULL, NULL, NULL);
        if (!m_capture->sws_context) {
            video_frame_free(&m_capture->res_frame);
            if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) {
                video_frame_free(&m_capture->cap_frame);
            }
            free(m_capture);
            m_capture = NULL;
            return -1;
        }
    }

    m_rotate = RotateTrans(param->irotate);
    m_in_cap_width = m_out_cap_width = mode_width[m_capture->param.cap_mode];
    m_in_cap_height = m_out_cap_height = mode_height[m_capture->param.cap_mode];
    // A 90/270 degree rotation swaps the output dimensions.
    if (libyuv::kRotate90 == m_rotate || libyuv::kRotate270 == m_rotate) {
        m_out_cap_width = mode_height[m_capture->param.cap_mode];
        m_out_cap_height = mode_width[m_capture->param.cap_mode];
    }
    return 0;
}

int ConvertVideoType(VideoType video_type) {
    switch (video_type) {
        case VideoType::kUnknown:
            return libyuv::FOURCC_ANY;
        case VideoType::kI420:
            return libyuv::FOURCC_I420;
        case VideoType::kIYUV:  // same as VideoType::kYV12
        case VideoType::kYV12:
            return libyuv::FOURCC_YV12;
        case VideoType::kRGB24:
            return libyuv::FOURCC_24BG;
        case VideoType::kABGR:
            return libyuv::FOURCC_ABGR;
        case VideoType::kRGB565:
            return libyuv::FOURCC_RGBP;
        case VideoType::kYUY2:
            return libyuv::FOURCC_YUY2;
        case VideoType::kUYVY:
            return libyuv::FOURCC_UYVY;
        case VideoType::kMJPEG:
            return libyuv::FOURCC_MJPG;
        case VideoType::kNV21:
            return libyuv::FOURCC_NV21;
        case VideoType::kNV12:
            return libyuv::FOURCC_NV12;
        case VideoType::kARGB:
            return libyuv::FOURCC_ARGB;
        case VideoType::kBGRA:
            return libyuv::FOURCC_BGRA;
        case VideoType::kARGB4444:
            return libyuv::FOURCC_R444;
        case VideoType::kARGB1555:
            return libyuv::FOURCC_RGBO;
    }
    return libyuv::FOURCC_ANY;
}

size_t CalcBufferSize(VideoType type, int width, int height) {
    size_t buffer_size = 0;
    switch (type) {
        case VideoType::kI420:
        case VideoType::kNV12:
        case VideoType::kNV21:
        case VideoType::kIYUV:
        case VideoType::kYV12: {
            int half_width = (width + 1) >> 1;
            int half_height = (height + 1) >> 1;
            buffer_size = width * height + half_width * half_height * 2;
            break;
        }
        case VideoType::kARGB4444:
        case VideoType::kRGB565:
        case VideoType::kARGB1555:
        case VideoType::kYUY2:
        case VideoType::kUYVY:
            buffer_size = width * height * 2;
            break;
        case VideoType::kRGB24:
            buffer_size = width * height * 3;
            break;
        case VideoType::kBGRA:
        case VideoType::kARGB:
            buffer_size = width * height * 4;
            break;
        default:
            break;
    }
    return buffer_size;
}

int I420DataSize(int height, int stride_y, int stride_u, int stride_v) {
    return stride_y * height + (stride_u + stride_v) * ((height + 1) / 2);
}

// Written in I420-stride terms: twice an I420 frame, which equals
// stride_y * height * 3 (3 bytes per pixel) for even dimensions.
int RGB24DataSize(int height, int stride_y, int stride_u, int stride_v) {
    return stride_y * height * 2 + ((stride_u + stride_v) * ((height + 1) / 2) * 2);
}

bool CheackRotateParam(int width, int height, libyuv::RotationMode eRotate,
                       int dst_width, int dst_height) {
    bool bret = false;
    if (width == dst_width && height == dst_height) {
        if (libyuv::kRotate0 == eRotate || libyuv::kRotate180 == eRotate) {
            bret = true;
        }
    } else {
        if (width == dst_height && height == dst_width) {
            if (libyuv::kRotate90 == eRotate || libyuv::kRotate270 == eRotate) {
                bret = true;
            }
        }
    }
    return bret;
}

Buffer* VideoCaptureImpl::GetCaptureBuffer() {
    return m_pool;
}
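// Worked example of the buffer-size arithmetic above (numbers chosen for
// illustration, not taken from the original source): a 640x480 I420 frame is
//   Y: 640*480 = 307200, U: 320*240 = 76800, V: 320*240 = 76800
// for a total of 460800 bytes = 640*480*3/2, which is what both
// CalcBufferSize(VideoType::kI420, 640, 480) and
// I420DataSize(480, 640, 320, 320) return; the RGB24 equivalent is
// 640*480*3 = 921600 bytes.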
int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame, size_t videoFrameLength,
                                        const VideoCaptureCapability& frameInfo,
                                        int64_t captureTime /*=0*/) {
    const int32_t width = frameInfo.width;
    const int32_t height = frameInfo.height;

    if (0 == m_ilogcount) {
        //char strmsg[256] = { 0 };
        //snprintf(strmsg, 256, "IncomingFrame capture_time is %d, videoType=%d, rotate=%d, videoFrameLength=%d, width=%d, height=%d, and destination width=%d, height=%d.", captureTime, frameInfo.videoType, m_rotate, videoFrameLength, width, height, m_out_cap_width, m_out_cap_height);
        //CapLogEvent(1, strmsg);
        m_ilogcount++;
    }

    // Not encoded, convert to I420.
    if (frameInfo.videoType != VideoType::kMJPEG &&
        CalcBufferSize(frameInfo.videoType, width, abs(height)) != videoFrameLength) {
        CapLog("Wrong incoming frame length.");
        return -1;
    }

    int stride_y = m_in_cap_width;
    int stride_u = (m_in_cap_width + 1) / 2;
    int stride_v = (m_in_cap_width + 1) / 2;
    //uint8_t* i420y = (uint8_t*)AlignedMalloc(I420DataSize(height, stride_y, stride_u, stride_v), kBufferAlignment);
    //uint8_t* brg24 = (uint8_t*)AlignedMalloc(RGB24DataSize(m_dest_cap_height, m_dest_cap_width, (m_dest_cap_width+1)/2, (m_dest_cap_width + 1) / 2), kBufferAlignment);

    int conversionResult = libyuv::ConvertToI420(
        videoFrame, videoFrameLength,
        m_i420, stride_y,
        m_i420 + stride_y * m_in_cap_height, stride_u,
        m_i420 + stride_y * m_in_cap_height + stride_u * ((m_in_cap_height + 1) / 2), stride_v,
        0, (height - m_in_cap_height) / 2,  // crop_x, crop_y: center vertically if the source is taller
        width, height,
        width, m_in_cap_height,
        libyuv::kRotate180,  // the captured image is always flipped 180 degrees at this stage
        ConvertVideoType(frameInfo.videoType));
    if (conversionResult < 0) {
        CapLog("Failed to convert capture frame from type %d to I420 for %s.",
               static_cast<int>(frameInfo.videoType), strerror(errno));
        return -1;
    }
    //{
    //    video_frame frmi420 = { 0 };
    //    frmi420.data[0] = m_i420;
    //    frmi420.linesize[0] = m_in_cap_height * 3 / 2;
    //    frmi420.width = m_in_cap_width;
    //    frmi420.height = m_in_cap_height;
    //    frmi420.format = VIDEO_FORMAT_I420;
    //    //m_capture->param.on_frame_i420(m_capture->param.user_data, &frmi420);
    //    char stroptname[260] = { 0 };
    //    snprintf(stroptname, 260, "%d_%d_%d_%d_i420.bmp", m_ilogcount, (int)m_rotate, m_in_cap_width, m_in_cap_height);
    //    video_frame_save_bmpfile(stroptname, &frmi420);
    //}

    if (libyuv::kRotate0 == m_rotate || libyuv::kRotate180 == m_rotate) {
        conversionResult = libyuv::ConvertFromI420(
            m_i420, stride_y,
            m_i420 + stride_y * m_in_cap_height, stride_u,
            m_i420 + stride_y * m_in_cap_height + stride_u * ((m_in_cap_height + 1) / 2), stride_v,
            m_rgb24, m_out_cap_width * 3,
            m_out_cap_width, m_out_cap_height,
            ConvertVideoType(VideoType::kRGB24));
        if (conversionResult < 0) {
            CapLog("Failed to convert capture frame from I420 to RGB24 for %s.", strerror(errno));
            return -1;
        }
    } else {
        if (libyuv::kRotate90 == m_rotate || libyuv::kRotate270 == m_rotate) {
            // The applied libyuv rotation is the complement of the requested
            // one: a requested 90 becomes kRotate270 and vice versa.
            libyuv::RotationMode erotate = libyuv::kRotate90;
            if (libyuv::kRotate90 == m_rotate) {
                erotate = libyuv::kRotate270;
            }
            int opt_stride_y = m_out_cap_width;
            int opt_stride_u = (m_out_cap_width + 1) / 2;
            int opt_stride_v = (m_out_cap_width + 1) / 2;
            //uint8_t* iopt420 = (uint8_t*)AlignedMalloc(I420DataSize(m_dest_cap_height, opt_stride_y, opt_stride_u, opt_stride_v), kBufferAlignment);
            int rotateResult = libyuv::I420Rotate(
                m_i420, stride_y,
                m_i420 + stride_y * m_in_cap_height, stride_u,
                m_i420 + stride_y * m_in_cap_height + stride_u * ((m_in_cap_height + 1) / 2), stride_v,
                m_opti420, opt_stride_y,
                m_opti420 + opt_stride_y * m_out_cap_height, opt_stride_u,
                m_opti420 + opt_stride_y * m_out_cap_height + opt_stride_u * ((m_out_cap_height + 1) / 2), opt_stride_v,
                m_in_cap_width, m_in_cap_height, erotate);
            if (rotateResult < 0) {
                CapLog("Failed to Rotate Frame %d for %s.", (int)erotate, strerror(errno));
                return -1;
            }
            //{
            //    video_frame frmi420 = { 0 };
            //    frmi420.data[0] = m_opti420;
            //    frmi420.linesize[0] = m_out_cap_width * 3 / 2;
            //    frmi420.width = m_out_cap_width;
            //    frmi420.height = m_out_cap_height;
            //    frmi420.format = VIDEO_FORMAT_I420;
            //    //m_capture->param.on_frame_i420(m_capture->param.user_data, &frmi420);
            //    char stroptname[260] = { 0 };
            //    snprintf(stroptname, 260, "%d_%d_%d_%d_i420.bmp", m_ilogcount, (int)m_rotate, m_out_cap_width, m_out_cap_height);
            //    video_frame_save_bmpfile(stroptname, &frmi420);
            //}
            //yu12_to_dib24(brg24, iopt420, m_dest_cap_width, m_dest_cap_height);
            conversionResult = libyuv::ConvertFromI420(
                m_opti420, opt_stride_y,
                m_opti420 + opt_stride_y * m_out_cap_height, opt_stride_u,
                m_opti420 + opt_stride_y * m_out_cap_height + opt_stride_u * ((m_out_cap_height + 1) / 2), opt_stride_v,
                m_rgb24, m_out_cap_width * 3,
                m_out_cap_width, m_out_cap_height,
                ConvertVideoType(VideoType::kRGB24));
            if (conversionResult < 0) {
                CapLog("Failed to convert capture frame from I420 to RGB24 for %s.", strerror(errno));
                return -1;
            }
            //AlignedFree(iopt420);
            //iopt420 = NULL;
        }
    }

    if (NULL != m_capture->param.on_frame) {
        video_frame frm = { 0 };
        frm.data[0] = m_rgb24;
        frm.linesize[0] = m_out_cap_width * 3;
        frm.width = m_out_cap_width;
        frm.height = m_out_cap_height;
        frm.format = VIDEO_FORMAT_RGB24;
        m_capture->param.on_frame(m_capture->param.user_data, &frm);
        //char strrgbname[260] = { 0 };
        //snprintf(strrgbname, 260, "%d_%d_%d_%d_rgb.bmp", m_ilogcount, (int)m_rotate, m_out_cap_width, m_out_cap_height);
        //video_frame_save_bmpfile(strrgbname, &frm);
        //m_ilogcount++;
    }
    //AlignedFree(i420y);
    //i420y = NULL;
    //AlignedFree(brg24);
    //brg24 = NULL;
    return 0;
}
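// For reference, the I420 plane layout assumed by the pointer arithmetic in
// IncomingFrame (standard contiguous I420): for a WxH frame at base pointer p,
//   Y plane: p,                                  stride W
//   U plane: p + W*H,                            stride (W+1)/2
//   V plane: p + W*H + ((W+1)/2) * ((H+1)/2),    stride (W+1)/2
// e.g. for 640x480 the U plane starts at offset 307200 and V at 384000.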
static void* VideoCaptureProcess(void* arg) {
    int retVal = 0;
    fd_set rSet;
    struct timeval timeout;
    VideoCaptureImpl* pVideoCapture = (VideoCaptureImpl*)arg;
    int iDeviceFd = pVideoCapture->GetCaptureVideoFd();

    while (false == pVideoCapture->GetStopCaptureFlag()) {
        FD_ZERO(&rSet);
        FD_SET(iDeviceFd, &rSet);
        timeout.tv_sec = 5;
        timeout.tv_usec = 0;
        retVal = select(iDeviceFd + 1, &rSet, NULL, NULL, &timeout);
        if (retVal < 0 && errno != EINTR) {  // continue if interrupted
            // select failed
            if (pVideoCapture) {
                pVideoCapture->CapLog("exit for select failed.");
            }
            return NULL;
        } else if (retVal == 0) {
            // select timed out
            if (pVideoCapture) {
                pVideoCapture->CapLog("exit for select timed out.");
            }
            return NULL;
        } else if (!FD_ISSET(iDeviceFd, &rSet)) {
            // no event on the camera handle
            if (pVideoCapture) {
                pVideoCapture->CapLog("exit for no event on camera handle.");
            }
            return NULL;
        }

        if (pVideoCapture->VideoCaptureStarted()) {
            struct v4l2_buffer buf;
            memset(&buf, 0, sizeof(struct v4l2_buffer));
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            // dequeue a buffer - repeat until dequeued properly!
            while (ioctl(iDeviceFd, VIDIOC_DQBUF, &buf) < 0) {
                if (errno != EINTR) {
                    if (pVideoCapture) {
                        pVideoCapture->CapLog("could not sync on a buffer on device %s.", strerror(errno));
                    }
                    return NULL;
                }
            }

            VideoCaptureCapability frameInfo;
            frameInfo.width = pVideoCapture->GetCapture_Width();
            frameInfo.height = pVideoCapture->GetCapture_Height();
            frameInfo.videoType = pVideoCapture->GetCaptureVideoType();

            // convert to I420 if needed
            Buffer* buffer_pool = pVideoCapture->GetCaptureBuffer();
            pVideoCapture->IncomingFrame((unsigned char*)buffer_pool[buf.index].start, buf.length, frameInfo);

            // enqueue the buffer again
            if (ioctl(iDeviceFd, VIDIOC_QBUF, &buf) == -1) {
                if (pVideoCapture) {
                    pVideoCapture->CapLog("Failed to enqueue capture buffer");
                }
            }
        }
    }
    usleep(0);
    return NULL;
}
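// Illustrative only (not part of the original API): a hedged sketch of how a
// caller might probe for a usable dev_id before VideoCaptureSetParam(). The
// helper name and the scan limit of 8 nodes are hypothetical.
#if 0
static int FindFirstVideoDevice() {
    for (int id = 0; id < 8; id++) {
        char device[20] = {0};
        snprintf(device, 20, "/dev/video%d", id);
        int fd = open(device, O_RDWR | O_NONBLOCK, 0);
        if (fd >= 0) {
            close(fd);
            return id;  // first node that opens successfully
        }
    }
    return -1;  // no usable device found
}
#endif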
int VideoCaptureImpl::StartVideoCapture() {
    if (m_bCaptureStarted) {
        if (m_real_cap_width == mode_width[m_capture->param.cap_mode] &&
            m_real_cap_height == mode_height[m_capture->param.cap_mode] &&
            m_frame_fmt == m_capture->param.frame_fmt) {
            return 0;
        } else {
            StopVideoCapture();
        }
    }

    // first open the /dev/video device
    char device[20] = {0};
    snprintf(device, 20, "/dev/video%d", (int)m_deviceId);
    if ((m_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) {
        CapLog("error in opening %s for %s.", device, strerror(errno));
        return -1;
    }

    // Supported video formats in preferred order.
    // If the requested resolution is larger than VGA, we prefer MJPEG. Go for
    // I420 otherwise.
    const int nFormats = 5;
    unsigned int fmts[nFormats];
    if (mode_width[m_capture->param.cap_mode] > 640 || mode_height[m_capture->param.cap_mode] > 480) {
        fmts[0] = V4L2_PIX_FMT_MJPEG;
        fmts[1] = V4L2_PIX_FMT_YUV420;
        fmts[2] = V4L2_PIX_FMT_YUYV;
        fmts[3] = V4L2_PIX_FMT_UYVY;
        fmts[4] = V4L2_PIX_FMT_JPEG;
    } else {
        fmts[0] = V4L2_PIX_FMT_YUV420;
        fmts[1] = V4L2_PIX_FMT_YUYV;
        fmts[2] = V4L2_PIX_FMT_UYVY;
        fmts[3] = V4L2_PIX_FMT_MJPEG;
        fmts[4] = V4L2_PIX_FMT_JPEG;
    }

    // Enumerate image formats.
    struct v4l2_fmtdesc fmt;
    int fmtsIdx = nFormats;
    memset(&fmt, 0, sizeof(fmt));
    fmt.index = 0;
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    //CapLog("Video Capture enumerates supported image formats:");
    while (ioctl(m_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) {
        char strformat[32] = { 0 };
        GetFourccName(strformat, 32, fmt.pixelformat);
        //CapLog("pixelformat=%s, description='%s'", strformat, fmt.description);
        // Match the preferred order.
        for (int i = 0; i < nFormats; i++) {
            if (fmt.pixelformat == fmts[i] && i < fmtsIdx)
                fmtsIdx = i;
        }
        // Keep enumerating.
        fmt.index++;
    }

    if (fmtsIdx == nFormats) {
        CapLog("no supported video formats found");
        close(m_deviceFd);
        return -1;
    } else {
        char strformat[32] = { 0 };
        GetFourccName(strformat, 32, fmts[fmtsIdx]);
        //char strmsg[256] = { 0 };
        //snprintf(strmsg, 256, "we prefer format %s.", strformat);
        //CapLogEvent(1, strmsg);
    }

    struct v4l2_format video_fmt;
    memset(&video_fmt, 0, sizeof(v4l2_format));
    video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    video_fmt.fmt.pix.field = V4L2_FIELD_ANY;
    video_fmt.fmt.pix.width = mode_width[m_capture->param.cap_mode];
    video_fmt.fmt.pix.height = mode_height[m_capture->param.cap_mode];
    video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx];
    //CapLog("video_fmt.fmt.pix.width = %d, video_fmt.fmt.pix.height = %d.", video_fmt.fmt.pix.width, video_fmt.fmt.pix.height);

    if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
        m_captureVideoType = VideoType::kYUY2;
    else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
        m_captureVideoType = VideoType::kI420;
    else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY)
        m_captureVideoType = VideoType::kUYVY;
    else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG ||
             video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG)
        m_captureVideoType = VideoType::kMJPEG;

    // set format and frame size now
    if (ioctl(m_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) {
        CapLog("error in VIDIOC_S_FMT for %s.", strerror(errno));
        close(m_deviceFd);
        return -1;
    } else {
        // Read back the format the driver actually accepted.
        if (ioctl(m_deviceFd, VIDIOC_G_FMT, &video_fmt) < 0) {
            CapLog("error in VIDIOC_G_FMT for %s.", strerror(errno));
            close(m_deviceFd);
            return -1;
        } else {
            // initialize current width and height
            m_real_cap_width = video_fmt.fmt.pix.width;
            m_real_cap_height = video_fmt.fmt.pix.height;
            //CapLog("real camera capture m_capture_width = %d, m_capture_height = %d.", m_real_cap_width, m_real_cap_height);
        }
    }

    // Try to set the frame rate, after checking the driver capability.
    bool driver_framerate_support = true;
    struct v4l2_streamparm streamparms;
    memset(&streamparms, 0, sizeof(streamparms));
    streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(m_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) {
        CapLog("error in VIDIOC_G_PARM, and error info is %s.", strerror(errno));
        driver_framerate_support = false;  // continue anyway
    } else {
        // check that the capability flag V4L2_CAP_TIMEPERFRAME is set.
        if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
            // driver supports the feature. Set the required framerate.
            memset(&streamparms, 0, sizeof(streamparms));
            streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            streamparms.parm.capture.timeperframe.numerator = 1;
            streamparms.parm.capture.timeperframe.denominator = (int32_t)m_capture->param.fps;
            if (ioctl(m_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) {
                CapLog("Failed to set the framerate. error info is %s.", strerror(errno));
                driver_framerate_support = false;
            } else {
                m_currentFrameRate = (int32_t)m_capture->param.fps;
                //char strframerate[256] = { 0 };
                //snprintf(strframerate, 256, "Set Camera video capture rate to %d, and numerator is %d, denominator is %d.", m_currentFrameRate, streamparms.parm.capture.timeperframe.numerator, streamparms.parm.capture.timeperframe.denominator);
                //CapLogEvent(0, strframerate);
                if (ioctl(m_deviceFd, VIDIOC_G_PARM, &streamparms) == 0) {
                    //char stroutrate[256] = { 0 };
                    //snprintf(stroutrate, 256, "Get video capture numerator is %d, denominator is %d.", streamparms.parm.capture.timeperframe.numerator, streamparms.parm.capture.timeperframe.denominator);
                    //CapLogEvent(1, stroutrate);
                }
            }
        }
    }

    // If the driver doesn't support framerate control, hardcode a value
    // based on the frame size.
    if (!driver_framerate_support) {
        if (m_in_cap_width >= 800 && m_captureVideoType != VideoType::kMJPEG) {
            m_currentFrameRate = 15;
        } else {
            m_currentFrameRate = 5;
            //CapLog("The Camera does not support setting the video capture framerate, set capture rate to %d.", m_currentFrameRate);
        }
    }

    if (false == GetCamBrightnessInfo()) {
        close(m_deviceFd);
        return -1;
    }

    if (!AllocateVideoCapturebuffer()) {
        CapLog("failed to allocate video capture buffers");
        close(m_deviceFd);
        return -1;
    }

    // Allow the capture loop to run; a previous StopVideoCapture() leaves
    // m_bStopCapture set, which would otherwise end the new thread at once.
    m_bStopCapture = false;
    // pthread_create() returns 0 on success and an error number otherwise.
    if (0 != pthread_create(&m_CaptureThreadId, NULL, VideoCaptureProcess, this)) {
        CapLog("Create Video Capture Thread Failed!");
        close(m_deviceFd);
        return -1;
    }

    // Needed to start the UVC camera - from the uvcview application.
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(m_deviceFd, VIDIOC_STREAMON, &type) == -1) {
        CapLog("failed to turn on stream for %s.", strerror(errno));
        close(m_deviceFd);
        return -1;
    }

    m_bCaptureStarted = true;
    return 0;
}
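// For reference: V4L2 expresses frame rate as a frame interval, so
// timeperframe = numerator/denominator seconds per frame, i.e.
// fps = denominator / numerator. The code above requests {1, fps}, e.g.
// {1, 25} for 25 fps; a driver may round the interval, and the follow-up
// VIDIOC_G_PARM reads back what was actually applied.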
bool VideoCaptureImpl::AllocateVideoCapturebuffer() {
    return AllocateVideoBuffers() && AlignedMallocVideoBuffer();
}

// critical section protected by the caller
bool VideoCaptureImpl::AllocateVideoBuffers() {
    struct v4l2_requestbuffers rbuffer;
    memset(&rbuffer, 0, sizeof(v4l2_requestbuffers));
    rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;  // buffer frame type
    rbuffer.memory = V4L2_MEMORY_MMAP;           // memory-mapped rather than user-pointer I/O
    rbuffer.count = kNoOfV4L2Bufffers;           // number of frames in the buffer queue

    // Request capture buffers from the device.
    if (ioctl(m_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0) {
        CapLog("Could not get buffers from device for %s.", strerror(errno));
        return false;
    }
    if (rbuffer.count > kNoOfV4L2Bufffers) {
        rbuffer.count = kNoOfV4L2Bufffers;
    }
    m_buffersAllocatedByDevice = rbuffer.count;

    // Map the buffers.
    m_pool = new Buffer[rbuffer.count];
    for (unsigned int i = 0; i < rbuffer.count; i++) {
        struct v4l2_buffer buffer;
        memset(&buffer, 0, sizeof(v4l2_buffer));
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.index = i;
        // Query the offset and length of this buffer frame.
        if (ioctl(m_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0) {
            return false;
        }
        m_pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED,
                               m_deviceFd, buffer.m.offset);
        if (MAP_FAILED == m_pool[i].start) {
            for (unsigned int j = 0; j < i; j++)
                munmap(m_pool[j].start, m_pool[j].length);
            return false;
        }
        m_pool[i].length = buffer.length;
        if (ioctl(m_deviceFd, VIDIOC_QBUF, &buffer) < 0) {
            return false;
        }
    }
    return true;
}
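// Note on the mmap pattern above: the V4L2 documentation specifies MAP_SHARED
// for streaming I/O mappings (a private mapping would not reliably see the
// frames the driver writes), and VIDIOC_REQBUFS may grant a different buffer
// count than requested, which is why rbuffer.count is read back and clamped.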
bool VideoCaptureImpl::DeAllocateVideoBuffers() {
    // unmap buffers
    for (int i = 0; i < m_buffersAllocatedByDevice; i++) {
        munmap(m_pool[i].start, m_pool[i].length);
    }
    delete[] m_pool;

    // turn off stream
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(m_deviceFd, VIDIOC_STREAMOFF, &type) < 0) {
        CapLog("VIDIOC_STREAMOFF error. error no: %d", errno);
    }
    return true;
}
error no: %d", errno); } return true; } bool VideoCaptureImpl::AlignedMallocVideoBuffer() { bool bret = false; int stride_y = m_in_cap_width; int stride_u = (m_in_cap_width + 1) / 2; int stride_v = (m_in_cap_width + 1) / 2; m_i420 = (uint8_t*)AlignedMalloc(I420DataSize(m_in_cap_height, stride_y, stride_u, stride_v), kBufferAlignment); m_rgb24 = (uint8_t*)AlignedMalloc(RGB24DataSize(m_out_cap_height, m_out_cap_width, (m_out_cap_width + 1) / 2, (m_out_cap_width + 1) / 2), kBufferAlignment); int opt_stride_y = m_out_cap_width; int opt_stride_u = (m_out_cap_width + 1) / 2; int opt_stride_v = (m_out_cap_width + 1) / 2; m_opti420 = (uint8_t*)AlignedMalloc(I420DataSize(m_out_cap_height, opt_stride_y, opt_stride_u, opt_stride_v), kBufferAlignment); if (m_i420 && m_rgb24 && m_opti420){ bret = true; } return bret; } bool VideoCaptureImpl::FreeAlignedMallocVideoBuffer() { if (NULL != m_i420){ AlignedFree(m_i420); m_i420 = NULL; } if (NULL != m_rgb24) { AlignedFree(m_rgb24); m_rgb24 = NULL; } if (NULL != m_opti420) { AlignedFree(m_opti420); m_opti420 = NULL; } return true; } bool VideoCaptureImpl::VideoCaptureStarted() { return m_bCaptureStarted; } int VideoCaptureImpl::GetCaptureVideoFd() { return m_deviceFd; } VideoType VideoCaptureImpl::GetCaptureVideoType() { return m_captureVideoType; } int VideoCaptureImpl::GetCapture_Width() { return m_real_cap_width; } int VideoCaptureImpl::GetCapture_Height() { return m_real_cap_height; } bool VideoCaptureImpl::GetStopCaptureFlag() { return m_bStopCapture; } int VideoCaptureImpl::StopVideoCapture() { if (m_bCaptureStarted){ m_bCaptureStarted = false; m_bStopCapture = true; if (0 == pthread_join(m_CaptureThreadId, NULL)) { m_CaptureThreadId = 0; CapLog("thread join video capture thread success."); } else { CapLog("thread join video capture thread failed for %s.", strerror(errno)); } DeAllocateVideoBuffers(); FreeAlignedMallocVideoBuffer(); close(m_deviceFd); m_deviceFd = -1; CapLog("video capture has stopped!"); } return 0; } void VideoCaptureImpl::VideoCaptureDestroy() { delete this; } int VideoCaptureImpl::GetCamBrightness(int* ibright, bool bRawRange) { int iret = -1; struct v4l2_control ctrl; ctrl.id = V4L2_CID_BRIGHTNESS; if (ioctl(m_deviceFd,VIDIOC_G_CTRL,&ctrl) == -1){ CapLog("VIDIOC_S_CTRL get V4L2_CID_BRIGHTNESS error for %s", strerror(errno)); } else { if (bRawRange) { *ibright = ctrl.value; } else { *ibright = TransFromRealBrightnessValue(ctrl.value); } iret = 0; } return iret; } int VideoCaptureImpl::SetCamBrightness(int ibright, bool bRawRange) { int iret = -1; struct v4l2_control ctrl; ctrl.id = V4L2_CID_BRIGHTNESS; if (bRawRange) { ctrl.value = ibright; } else { ctrl.value = TransToRealBrightnessValue(ibright); } if (ioctl(m_deviceFd, VIDIOC_S_CTRL, &ctrl) == -1){ CapLog("VIDIOC_S_CTRL set V4L2_CID_BRIGHTNESS error for %s.", strerror(errno)); } else{ iret = 0; } return iret; } int VideoCaptureImpl::SetCamAutoBrightness() { int iret = -1; struct v4l2_control ctrl; ctrl.id = V4L2_CID_BRIGHTNESS; ctrl.value = m_idefaultbrightness; if (ioctl(m_deviceFd, VIDIOC_S_CTRL, &ctrl) == -1) { CapLog("VIDIOC_S_CTRL set V4L2_CID_AUTOBRIGHTNESS error for %s", strerror(errno)); } else { iret = 0; } iret = 0; return iret; } bool VideoCaptureImpl::GetCamBrightnessInfo() { bool bret = false; struct v4l2_queryctrl qctrl; qctrl.id = V4L2_CID_BRIGHTNESS; if (ioctl(m_deviceFd, VIDIOC_QUERYCTRL, &qctrl) == -1) { CapLog("VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS error for %s", strerror(errno)); } else { //CapLog("VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS success {min(%d) - 
max(%d)},default is %d", qctrl.minimum, qctrl.maximum, qctrl.default_value); m_idefaultbrightness = qctrl.default_value; m_iminbrightness = qctrl.minimum; m_imaxbrightness = qctrl.maximum; bret = true; } return bret; } bool VideoCaptureImpl::GetCamRawBrightnessRange(int* imin, int* imax) { bool bret = false; struct v4l2_queryctrl qctrl; qctrl.id = V4L2_CID_BRIGHTNESS; if (ioctl(m_deviceFd, VIDIOC_QUERYCTRL, &qctrl) == -1) { CapLog("VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS error for %s", strerror(errno)); } else { CapLog("VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS success {min(%d) - max(%d)}, default is %d.", qctrl.minimum, qctrl.maximum, qctrl.default_value); *imin = qctrl.minimum; *imax = qctrl.maximum; bret = true; } return bret; } //100 to real brightness value int VideoCaptureImpl::TransToRealBrightnessValue(int ibright) { float fvalue = ibright * (m_imaxbrightness - m_iminbrightness) / 10; int ivalue = fvalue; int ilast = ivalue % 10; int inum = ivalue / 10; if (ilast >= 5) { inum++; } inum += m_iminbrightness; if (inum < m_iminbrightness){ inum = m_iminbrightness; } if (inum > m_imaxbrightness){ inum = m_imaxbrightness; } return inum; } //real brightness value to [0-100] int VideoCaptureImpl::TransFromRealBrightnessValue(int ibright) { int itotal = m_imaxbrightness - m_iminbrightness; int ivalue = ibright - m_iminbrightness; float fvalue = ivalue * 1000 / itotal; ivalue = fvalue; int ilast = ivalue % 10; int inum = ivalue / 10; if (ilast >= 5) { inum++; } return inum; } libyuv::RotationMode VideoCaptureImpl::RotateTrans(int irotate) { libyuv::RotationMode rotation_mode = libyuv::kRotate0; switch (irotate) { case 0: rotation_mode = libyuv::kRotate0; break; case 90: rotation_mode = libyuv::kRotate90; break; case 180: rotation_mode = libyuv::kRotate180; break; case 270: rotation_mode = libyuv::kRotate270; break; } return rotation_mode; } void VideoCaptureImpl::CapLog(const char* fmt, ...) { if (m_callback.debug) { va_list arg; va_start(arg, fmt); (*m_callback.debug)(m_callback.user_data, fmt, arg); va_end(arg); } } void VideoCaptureImpl::CapLogEvent(int itype, const char* strmessage) { if (m_callback.logevent) { (*m_callback.logevent)(itype, strmessage); } }