@@ -47,19 +47,22 @@ int GetFourccName(char* strbuf, uint32_t ulen, uint32_t fourcc)
 return iret;
 }

-VideoCaptureImpl::VideoCaptureImpl(ICaptureCallback* pCallback)
+VideoCaptureImpl::VideoCaptureImpl(videocap_callback_t* pCallback)
 {
- m_callback = pCallback;
+ memcpy(&m_callback, pCallback, sizeof(videocap_callback_t));
 m_capture = NULL;
 m_bCaptureStarted = false;

 m_deviceId = -1;
 m_deviceFd = -1;

- m_capture_width = 0;
- m_capture_height = 0;
- m_dest_cap_width = 0;
- m_dest_cap_height = 0;
+ m_in_cap_width = 0;
+ m_in_cap_height = 0;
+ m_real_cap_width = 0;
+ m_real_cap_height = 0;
+ m_out_cap_width = 0;
+ m_out_cap_height = 0;
+
 m_rotate = libyuv::kRotate0;
 m_frame_fmt = VIDEO_FORMAT_I420;
 m_captureVideoType = VideoType::kI420;
@@ -137,14 +140,10 @@ int VideoCaptureImpl::VideoCaptureSetParam(videocap_param_t* param)
 }
 }
 else {
- if (m_callback) {
- m_callback->Debug("%s", "param->option & VIDEOCAP_OPT_EANBLE_RESIZE success.");
- }
+ CapLog("%s", "param->option & VIDEOCAP_OPT_EANBLE_RESIZE success.");
 }

- if (m_callback){
- m_callback->Debug("%s:%d param->option = %d.", __FUNCTION__, __LINE__, param->option);
- }
+ CapLog("%s:%d param->option = %d.", __FUNCTION__, __LINE__, param->option);

 m_capture = (videocap_t*)malloc(sizeof(videocap_t));
 if (!m_capture) {
@@ -157,9 +156,8 @@ int VideoCaptureImpl::VideoCaptureSetParam(videocap_param_t* param)
 if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) {
 int width = mode_width[param->cap_mode];
 int height = mode_height[param->cap_mode];
- if (m_callback){
- m_callback->Debug("%s:%d, width = %d, height = %d.", __FUNCTION__, __LINE__, width, height);
- }
+
+ CapLog("%s:%d, width = %d, height = %d.", __FUNCTION__, __LINE__, width, height);

 if (video_frame_alloc(width, height, param->frame_fmt, &m_capture->cap_frame) != 0) {
 free(m_capture);
@@ -168,10 +166,9 @@ int VideoCaptureImpl::VideoCaptureSetParam(videocap_param_t* param)
 video_frame_fill_black(&m_capture->cap_frame);
 }
 else{
- if (m_callback){
- m_callback->Debug("param->option & VIDEOCAP_OPT_ENABLE_GRAB is false");
- }
+ CapLog("param->option & VIDEOCAP_OPT_ENABLE_GRAB is false");
 }
+
 if (param->option & VIDEOCAP_OPT_ENABLE_ASYNC_GRAB) {

 }
@@ -179,9 +176,8 @@ int VideoCaptureImpl::VideoCaptureSetParam(videocap_param_t* param)
 if (param->option & VIDEOCAP_OPT_EANBLE_RESIZE) {
 int width = mode_width[param->res_mode];
 int height = mode_height[param->res_mode];
- if (m_callback){
- m_callback->Debug("%s:%d, width = %d, height = %d.", __FUNCTION__, __LINE__, width, height);
- }
+
+ CapLog("%s:%d, width = %d, height = %d.", __FUNCTION__, __LINE__, width, height);

 if (video_frame_alloc(width, height, param->frame_fmt, &m_capture->res_frame) != 0) {
 if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) {
@@ -211,18 +207,17 @@ int VideoCaptureImpl::VideoCaptureSetParam(videocap_param_t* param)
 return -1;
 }
 }
- if (m_callback) {
- m_callback->Debug("%s:%d, param->irotate %d.", __FUNCTION__, __LINE__, param->irotate);
- }
+
+ CapLog("%s:%d, param->irotate %d.", __FUNCTION__, __LINE__, param->irotate);

 m_rotate = RotateTrans(param->irotate);

- m_dest_cap_width = mode_width[m_capture->param.cap_mode];
- m_dest_cap_height = mode_height[m_capture->param.cap_mode];
+ m_in_cap_width = m_out_cap_width = mode_width[m_capture->param.cap_mode];
+ m_in_cap_height = m_out_cap_height = mode_height[m_capture->param.cap_mode];

 if (libyuv::kRotate90 == m_rotate || libyuv::kRotate270 == m_rotate){
- m_dest_cap_width = mode_height[m_capture->param.cap_mode];
- m_dest_cap_height = mode_width[m_capture->param.cap_mode];
+ m_out_cap_width = mode_height[m_capture->param.cap_mode];
+ m_out_cap_height = mode_width[m_capture->param.cap_mode];
 }

 return 0;
@@ -344,25 +339,20 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
 const int32_t height = frameInfo.height;

 if (0 == m_ilogcount){
- if (m_callback){
- m_callback->Debug("IncomingFrame capture_time is %d, videoType=%d, rotate=%d, videoFrameLength=%d, width=%d, height=%d, and destination width=%d, height=%d.", captureTime, frameInfo.videoType, m_rotate, videoFrameLength, width, height, m_dest_cap_width, m_dest_cap_height);
- }
+ CapLog("IncomingFrame capture_time is %d, videoType=%d, rotate=%d, videoFrameLength=%d, width=%d, height=%d, and destination width=%d, height=%d.", captureTime, frameInfo.videoType, m_rotate, videoFrameLength, width, height, m_out_cap_width, m_out_cap_height);
 m_ilogcount++;
 }

 // Not encoded, convert to I420.
 if (frameInfo.videoType != VideoType::kMJPEG &&
 CalcBufferSize(frameInfo.videoType, width, abs(height)) != videoFrameLength) {
- if (m_callback) {
- m_callback->Debug("Wrong incoming frame length.");
- }
-
+ CapLog("Wrong incoming frame length.");
 return -1;
 }

- int stride_y = width;
- int stride_u = (width + 1)/2;
- int stride_v = (width + 1)/2;
+ int stride_y = m_in_cap_width;
+ int stride_u = (m_in_cap_width + 1)/2;
+ int stride_v = (m_in_cap_width + 1)/2;

 //uint8_t* i420y = (uint8_t*)AlignedMalloc(I420DataSize(height, stride_y, stride_u, stride_v), kBufferAlignment);
 //uint8_t* brg24 = (uint8_t*)AlignedMalloc(RGB24DataSize(m_dest_cap_height, m_dest_cap_width, (m_dest_cap_width+1)/2, (m_dest_cap_width + 1) / 2), kBufferAlignment);
@@ -370,56 +360,52 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
 int conversionResult = libyuv::ConvertToI420(videoFrame, videoFrameLength,
 m_i420,
 stride_y,
- m_i420 + stride_y * height,
+ m_i420 + stride_y * m_in_cap_height,
 stride_u,
- m_i420 + stride_y * height + stride_u * ((height + 1) / 2),
+ m_i420 + stride_y * m_in_cap_height + stride_u * ((m_in_cap_height + 1) / 2),
 stride_v,
 0,
- 0, // No Cropping
+ (height - m_in_cap_height) / 2, // crop vertically, centered on the source frame
 width,
 height,
 width,
- height,
+ m_in_cap_height,
 libyuv::kRotate180,
 ConvertVideoType(frameInfo.videoType)
 );

 if (conversionResult < 0) {
- if (m_callback){
- m_callback->Debug("Failed to convert capture frame from type %d to I420 for %s.", static_cast<int>(frameInfo.videoType), strerror(errno));
- }
+ CapLog("Failed to convert capture frame from type %d to I420 for %s.", static_cast<int>(frameInfo.videoType), strerror(errno));
 return -1;
 }

 //{
 // video_frame frmi420 = { 0 };
 // frmi420.data[0] = m_i420;
- // frmi420.linesize[0] = m_dest_cap_width * 3 / 2;
- // frmi420.width = m_dest_cap_width;
- // frmi420.height = m_dest_cap_height;
+ // frmi420.linesize[0] = m_in_cap_height * 3 / 2;
+ // frmi420.width = m_in_cap_width;
+ // frmi420.height = m_in_cap_height;
 // frmi420.format = VIDEO_FORMAT_I420;
 // //m_capture->param.on_frame_i420(m_capture->param.user_data, &frmi420);
 // char stroptname[260] = { 0 };
- // snprintf(stroptname, 260, "%d_%d_%d_%d_i420.bmp", m_ilogcount, (int)m_rotate, m_dest_cap_width, m_dest_cap_height);
+ // snprintf(stroptname, 260, "%d_%d_%d_%d_i420.bmp", m_ilogcount, (int)m_rotate, m_in_cap_width, m_in_cap_height);
 // video_frame_save_bmpfile(stroptname, &frmi420);
 //}

 if (libyuv::kRotate0 == m_rotate || libyuv::kRotate180 == m_rotate){
 conversionResult = libyuv::ConvertFromI420(m_i420,
 stride_y,
- m_i420 + stride_y * m_capture_height,
+ m_i420 + stride_y * m_in_cap_height,
 stride_u,
- m_i420 + stride_y * m_capture_height + stride_u * ((m_capture_height + 1) / 2),
+ m_i420 + stride_y * m_in_cap_height + stride_u * ((m_in_cap_height + 1) / 2),
 stride_v,
 m_rgb24,
- m_dest_cap_width * 3,
- m_dest_cap_width,
- m_dest_cap_height,
+ m_out_cap_width * 3,
+ m_out_cap_width,
+ m_out_cap_height,
 ConvertVideoType(kRGB24));
 if (conversionResult < 0) {
- if (m_callback) {
- m_callback->Debug("Failed to convert capture frame from I420 to RGB24 for %s.", strerror(errno));
- }
+ CapLog("Failed to convert capture frame from I420 to RGB24 for %s.", strerror(errno));
 return -1;
 }
 }
@@ -429,63 +415,59 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
 if (libyuv::kRotate90 == m_rotate) {
 erotate = libyuv::kRotate270;
 }
- int opt_stride_y = m_dest_cap_width;
- int opt_stride_u = (m_dest_cap_width + 1) / 2;
- int opt_stride_v = (m_dest_cap_width + 1) / 2;
+ int opt_stride_y = m_out_cap_width;
+ int opt_stride_u = (m_out_cap_width + 1) / 2;
+ int opt_stride_v = (m_out_cap_width + 1) / 2;
 //uint8_t* iopt420 = (uint8_t*)AlignedMalloc(I420DataSize(m_dest_cap_height, opt_stride_y, opt_stride_u, opt_stride_v), kBufferAlignment);
 int rotateResult = libyuv::I420Rotate(m_i420,
 stride_y,
- m_i420 + stride_y * height,
+ m_i420 + stride_y * m_in_cap_height,
 stride_u,
- m_i420 + stride_y * height + stride_u * ((height + 1) / 2),
+ m_i420 + stride_y * m_in_cap_height + stride_u * ((m_in_cap_height + 1) / 2),
 stride_v,
 m_opti420,
 opt_stride_y,
- m_opti420 + opt_stride_y * m_dest_cap_height,
+ m_opti420 + opt_stride_y * m_out_cap_height,
 opt_stride_u,
- m_opti420 + opt_stride_y * m_dest_cap_height + opt_stride_u * ((m_dest_cap_height + 1) / 2),
+ m_opti420 + opt_stride_y * m_out_cap_height + opt_stride_u * ((m_out_cap_height + 1) / 2),
 opt_stride_v,
- width,
- height,
+ m_in_cap_width,
+ m_in_cap_height,
 erotate);

 if (rotateResult < 0) {
- if (m_callback) {
- m_callback->Debug("Failed to Rotate Frame %d for %s.", (int)erotate, strerror(errno));
- }
+ CapLog("Failed to Rotate Frame %d for %s.", (int)erotate, strerror(errno));
 return -1;
 }

 //{
 // video_frame frmi420 = { 0 };
 // frmi420.data[0] = m_opti420;
- // frmi420.linesize[0] = m_dest_cap_width * 3 / 2;
- // frmi420.width = m_dest_cap_width;
- // frmi420.height = m_dest_cap_height;
+ // frmi420.linesize[0] = m_out_cap_width * 3 / 2;
+ // frmi420.width = m_out_cap_width;
+ // frmi420.height = m_out_cap_height;
 // frmi420.format = VIDEO_FORMAT_I420;
 // //m_capture->param.on_frame_i420(m_capture->param.user_data, &frmi420);
 // char stroptname[260] = { 0 };
- // snprintf(stroptname, 260, "%d_%d_%d_%d_i420.bmp", m_ilogcount, (int)m_rotate, m_dest_cap_width, m_dest_cap_height);
+ // snprintf(stroptname, 260, "%d_%d_%d_%d_i420.bmp", m_ilogcount, (int)m_rotate, m_out_cap_width, m_out_cap_height);
 // video_frame_save_bmpfile(stroptname, &frmi420);
 //}
 //yu12_to_dib24(brg24, iopt420, m_dest_cap_width, m_dest_cap_height);

 conversionResult = libyuv::ConvertFromI420(m_opti420,
 opt_stride_y,
- m_opti420 + opt_stride_y * m_dest_cap_height,
+ m_opti420 + opt_stride_y * m_out_cap_height,
 opt_stride_u,
- m_opti420 + opt_stride_y * m_dest_cap_height + opt_stride_u * ((m_dest_cap_height + 1) / 2),
+ m_opti420 + opt_stride_y * m_out_cap_height + opt_stride_u * ((m_out_cap_height + 1) / 2),
 opt_stride_v,
 m_rgb24,
- m_dest_cap_width * 3,
- m_dest_cap_width,
- m_dest_cap_height,
+ m_out_cap_width * 3,
+ m_out_cap_width,
+ m_out_cap_height,
 ConvertVideoType(kRGB24));

 if (conversionResult < 0) {
- if (m_callback) {
- m_callback->Debug("Failed to convert capture frame from I420 to RGB24 for %s.", strerror(errno));
- }
+ CapLog("Failed to convert capture frame from I420 to RGB24 for %s.", strerror(errno));
 return -1;
 }
 //AlignedFree(iopt420);
@@ -496,14 +478,14 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
 if (NULL != m_capture->param.on_frame) {
 video_frame frm = { 0 };
 frm.data[0] = m_rgb24;
- frm.linesize[0] = m_dest_cap_width * 3;
- frm.width = m_dest_cap_width;
- frm.height = m_dest_cap_height;
+ frm.linesize[0] = m_out_cap_width * 3;
+ frm.width = m_out_cap_width;
+ frm.height = m_out_cap_height;
 frm.format = VIDEO_FORMAT_RGB24;

 m_capture->param.on_frame(m_capture->param.user_data, &frm);
 //char strrgbname[260] = { 0 };
- //snprintf(strrgbname, 260, "%d_%d_%d_%d_rgb.bmp", m_ilogcount, (int)m_rotate, m_dest_cap_width, m_dest_cap_height);
+ //snprintf(strrgbname, 260, "%d_%d_%d_%d_rgb.bmp", m_ilogcount, (int)m_rotate, m_out_cap_width, m_out_cap_height);
 //video_frame_save_bmpfile(strrgbname, &frm);
 //m_ilogcount++;
 }
@@ -528,7 +510,6 @@ static void* VideoCaptureProcess(void *arg)
 VideoCaptureImpl* pVideoCapture = (VideoCaptureImpl*)arg;
 int iDeviceFd = pVideoCapture->GetCaptureVideoFd();

- ICaptureCallback* pcallback = pVideoCapture->GetCaptureCallback();
 while (false == pVideoCapture->GetStopCaptureFlag())
 {
 FD_ZERO(&rSet);
@@ -540,24 +521,24 @@ static void* VideoCaptureProcess(void *arg)
 if (retVal < 0 && errno != EINTR) // continue if interrupted
 {
 // select failed
- if (pcallback){
- pcallback->Debug("exit for select failed.");
+ if (pVideoCapture){
+ pVideoCapture->CapLog("exit for select failed.");
 }

 return NULL;
 }
 else if (retVal == 0) {
 // select timed out
- if (pcallback){
- pcallback->Debug("exit for select timed out.");
+ if (pVideoCapture){
+ pVideoCapture->CapLog("exit for select timed out.");
 }

 return NULL;
 }
 else if (!FD_ISSET(iDeviceFd, &rSet)) {
 // not event on camera handle
- if (pcallback){
- pcallback->Debug("exit for not event on camera handle.");
+ if (pVideoCapture){
+ pVideoCapture->CapLog("exit for not event on camera handle.");
 }

 return NULL;
@@ -571,8 +552,8 @@ static void* VideoCaptureProcess(void *arg)
 // dequeue a buffer - repeat until dequeued properly!
 while (ioctl(iDeviceFd, VIDIOC_DQBUF, &buf) < 0) {
 if (errno != EINTR) {
- if (pcallback){
- pcallback->Debug("could not sync on a buffer on device %s.", strerror(errno));
+ if (pVideoCapture){
+ pVideoCapture->CapLog("could not sync on a buffer on device %s.", strerror(errno));
 }
 return NULL;
 }
@@ -588,8 +569,8 @@ static void* VideoCaptureProcess(void *arg)
 pVideoCapture->IncomingFrame((unsigned char*)buffer_pool[buf.index].start, buf.length, frameInfo);
 // enqueue the buffer again
 if (ioctl(iDeviceFd, VIDIOC_QBUF, &buf) == -1) {
- if (pcallback){
- pcallback->Debug("Failed to enqueue capture buffer");
+ if (pVideoCapture){
+ pVideoCapture->CapLog("Failed to enqueue capture buffer");
 }
 }
 }
@@ -604,8 +585,8 @@ static void* VideoCaptureProcess(void *arg)
 int VideoCaptureImpl::StartVideoCapture()
 {
 if (m_bCaptureStarted){
- if (m_capture_width == mode_width[m_capture->param.cap_mode] &&
- m_capture_height == mode_height[m_capture->param.cap_mode] &&
+ if (m_real_cap_width == mode_width[m_capture->param.cap_mode] &&
+ m_real_cap_height == mode_height[m_capture->param.cap_mode] &&
 m_frame_fmt == m_capture->param.frame_fmt){
 return 0;
 }
@@ -617,9 +598,7 @@ int VideoCaptureImpl::StartVideoCapture()
 char device[20] = {0};
 snprintf(device, 20,"/dev/video%d", (int)m_deviceId);
 if ((m_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) {
- if (m_callback){
- m_callback->Debug("error in opening %s for %s.", device, strerror(errno));
- }
+ CapLog("error in opening %s for %s.", device, strerror(errno));
 return -1;
 }

@@ -649,15 +628,12 @@ int VideoCaptureImpl::StartVideoCapture()
 memset(&fmt, 0, sizeof(fmt));
 fmt.index = 0;
 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (m_callback){
- m_callback->Debug("Video Capture enumerates supported image formats:");
- }
+ CapLog("Video Capture enumerates supported image formats:");
+
 while (ioctl(m_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) {
 char strformat[32] = { 0 };
 GetFourccName(strformat, 32, fmt.pixelformat);
- if (m_callback) {
- m_callback->Debug("pixelformat=%s, description='%s'", strformat, fmt.description);
- }
+ CapLog("pixelformat=%s, description='%s'", strformat, fmt.description);
 // Match the preferred order.
 for (int i = 0; i < nFormats; i++) {
 if (fmt.pixelformat == fmts[i] && i < fmtsIdx)
@@ -668,18 +644,14 @@ int VideoCaptureImpl::StartVideoCapture()
 }

 if (fmtsIdx == nFormats) {
- if (m_callback) {
- m_callback->Debug("no supporting video formats found");
- }
+ CapLog("no supporting video formats found");
 close(m_deviceFd);
 return -1;
 }
 else {
 char strformat[32] = { 0 };
 GetFourccName(strformat, 32, fmts[fmtsIdx]);
- if (m_callback){
- m_callback->Debug("we prefer format %s.", strformat);
- }
+ CapLog("we prefer format %s.", strformat);
 }

 struct v4l2_format video_fmt;
@@ -689,11 +661,8 @@ int VideoCaptureImpl::StartVideoCapture()
 video_fmt.fmt.pix.width = mode_width[m_capture->param.cap_mode];
 video_fmt.fmt.pix.height = mode_height[m_capture->param.cap_mode];
 video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx];
-
- if (m_callback){
- m_callback->Debug("video_fmt.fmt.pix.width = %d, video_fmt.fmt.pix.height = %d.", video_fmt.fmt.pix.width, video_fmt.fmt.pix.height);
- }
-
+ CapLog("video_fmt.fmt.pix.width = %d, video_fmt.fmt.pix.height = %d.", video_fmt.fmt.pix.width, video_fmt.fmt.pix.height);
+
 if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
 m_captureVideoType = VideoType::kYUY2;
 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
@@ -706,29 +675,23 @@ int VideoCaptureImpl::StartVideoCapture()

 // set format and frame size now
 if (ioctl(m_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) {
- if (m_callback){
- m_callback->Debug("error in VIDIOC_S_FMT for %s.", strerror(errno));
- }
+ CapLog("error in VIDIOC_S_FMT for %s.", strerror(errno));
 close(m_deviceFd);
 return -1;
 }
 else
 {
 if (ioctl(m_deviceFd, VIDIOC_G_FMT, &video_fmt) < 0){
- if (m_callback){
- m_callback->Debug("error in VIDIOC_G_FMT for %s.", strerror(errno));
- }
+ CapLog("error in VIDIOC_G_FMT for %s.", strerror(errno));
 close(m_deviceFd);
 return -1;
 }
 else
 {
 // initialize current width and height
- m_capture_width = video_fmt.fmt.pix.width;
- m_capture_height = video_fmt.fmt.pix.height;
- if (m_callback){
- m_callback->Debug("real camera capture m_capture_width = %d, m_capture_height = %d.", m_capture_width, m_capture_height);
- }
+ m_real_cap_width = video_fmt.fmt.pix.width;
+ m_real_cap_height = video_fmt.fmt.pix.height;
+ CapLog("real camera capture m_real_cap_width = %d, m_real_cap_height = %d.", m_real_cap_width, m_real_cap_height);
 }
 }

@@ -738,9 +701,7 @@ int VideoCaptureImpl::StartVideoCapture()
 memset(&streamparms, 0, sizeof(streamparms));
 streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
 if (ioctl(m_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) {
- if (m_callback){
- m_callback->Debug("error in VIDIOC_G_PARM,and error info is %s.", strerror(errno));
- }
+ CapLog("error in VIDIOC_G_PARM,and error info is %s.", strerror(errno));
 driver_framerate_support = false;
 // continue
 }
@@ -752,26 +713,18 @@ int VideoCaptureImpl::StartVideoCapture()
 streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
 streamparms.parm.capture.timeperframe.numerator = 1;
 streamparms.parm.capture.timeperframe.denominator = (int32_t)m_capture->param.fps;
- if (m_callback){
- m_callback->Debug("Set Camera video capture timeperframe numerator is %d, denominator is %d.", streamparms.parm.capture.timeperframe.numerator, streamparms.parm.capture.timeperframe.denominator);
- }
+ CapLog("Set Camera video capture timeperframe numerator is %d, denominator is %d.", streamparms.parm.capture.timeperframe.numerator, streamparms.parm.capture.timeperframe.denominator);

 if (ioctl(m_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) {
- if (m_callback){
- m_callback->Debug("Failed to set the framerate. error info is %s.", strerror(errno));
- }
+ CapLog("Failed to set the framerate. error info is %s.", strerror(errno));
 driver_framerate_support = false;
 }
 else {
 m_currentFrameRate = (int32_t)m_capture->param.fps;
- if (m_callback){
- m_callback->Debug("Set Camera video capture rate to %d, and numerator is %d, denominator is %d.", m_currentFrameRate, streamparms.parm.capture.timeperframe.numerator, streamparms.parm.capture.timeperframe.denominator);
- }
+ CapLog("Set Camera video capture rate to %d, and numerator is %d, denominator is %d.", m_currentFrameRate, streamparms.parm.capture.timeperframe.numerator, streamparms.parm.capture.timeperframe.denominator);

- if (ioctl(m_deviceFd, VIDIOC_G_PARM, &streamparms) == 0) {
- if (m_callback){
- m_callback->Debug("Get video capture numerator is %d, denominator is %d.", streamparms.parm.capture.timeperframe.numerator, streamparms.parm.capture.timeperframe.denominator);
- }
+ if (ioctl(m_deviceFd, VIDIOC_G_PARM, &streamparms) == 0) {
+ CapLog("Get video capture numerator is %d, denominator is %d.", streamparms.parm.capture.timeperframe.numerator, streamparms.parm.capture.timeperframe.denominator);
 }
 }
 }
@@ -780,14 +733,12 @@ int VideoCaptureImpl::StartVideoCapture()
 // If driver doesn't support framerate control, need to hardcode.
 // Hardcoding the value based on the frame size.
 if (!driver_framerate_support) {
- if (m_capture_width >= 800 && m_captureVideoType != VideoType::kMJPEG) {
+ if (m_in_cap_width >= 800 && m_captureVideoType != VideoType::kMJPEG) {
 m_currentFrameRate = 15;
 }
 else {
 m_currentFrameRate = 5;
- if (m_callback){
- m_callback->Debug("The Camera not support set video capture framerate, set capture rate to %d.", m_currentFrameRate);
- }
+ CapLog("The Camera not support set video capture framerate, set capture rate to %d.", m_currentFrameRate);
 }
 }

@@ -797,22 +748,16 @@ int VideoCaptureImpl::StartVideoCapture()
 }

 if (!AllocateVideoCapturebuffer()) {
- if (m_callback){
- m_callback->Debug("failed to allocate video capture buffers");
- }
+ CapLog("failed to allocate video capture buffers");
 close(m_deviceFd);
 return -1;
 }
 else{
- if (m_callback){
- m_callback->Debug("allocate video capture buffers success!");
- }
+ CapLog("allocate video capture buffers success!");
 }

 if (-1 == pthread_create(&m_CaptureThreadId, NULL, VideoCaptureProcess, this)) {
- if (m_callback){
- m_callback->Debug("Create Video Capture Thread Failed!");
- }
+ CapLog("Create Video Capture Thread Failed!");
 close(m_deviceFd);
 return -1;
 }
@@ -821,9 +766,7 @@ int VideoCaptureImpl::StartVideoCapture()
 enum v4l2_buf_type type;
 type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
 if (ioctl(m_deviceFd, VIDIOC_STREAMON, &type) == -1) {
- if (m_callback){
- m_callback->Debug("failed to turn on stream for %s.", strerror(errno));
- }
+ CapLog("failed to turn on stream for %s.", strerror(errno));
 close(m_deviceFd);
 return -1;
 }
@@ -850,9 +793,7 @@ bool VideoCaptureImpl::AllocateVideoBuffers()

 //向设备申请缓冲区
 if (ioctl(m_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0){
- if (m_callback){
- m_callback->Debug("Could not get buffers from device for %s.", strerror(errno));
- }
+ CapLog("Could not get buffers from device for %s.", strerror(errno));
 return false;
 }

@@ -908,9 +849,7 @@ bool VideoCaptureImpl::DeAllocateVideoBuffers()
 enum v4l2_buf_type type;
 type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
 if (ioctl(m_deviceFd, VIDIOC_STREAMOFF, &type) < 0){
- if (m_callback){
- m_callback->Debug("VIDIOC_STREAMOFF error. error no: %d", errno);
- }
+ CapLog("VIDIOC_STREAMOFF error. error no: %d", errno);
 }

 return true;
@@ -920,17 +859,17 @@ bool VideoCaptureImpl::DeAllocateVideoBuffers()
 bool VideoCaptureImpl::AlignedMallocVideoBuffer()
 {
 bool bret = false;
- int stride_y = m_capture_width;
- int stride_u = (m_capture_width + 1) / 2;
- int stride_v = (m_capture_width + 1) / 2;
+ int stride_y = m_in_cap_width;
+ int stride_u = (m_in_cap_width + 1) / 2;
+ int stride_v = (m_in_cap_width + 1) / 2;

- m_i420 = (uint8_t*)AlignedMalloc(I420DataSize(m_capture_height, stride_y, stride_u, stride_v), kBufferAlignment);
- m_rgb24 = (uint8_t*)AlignedMalloc(RGB24DataSize(m_dest_cap_height, m_dest_cap_width, (m_dest_cap_width + 1) / 2, (m_dest_cap_width + 1) / 2), kBufferAlignment);
+ m_i420 = (uint8_t*)AlignedMalloc(I420DataSize(m_in_cap_height, stride_y, stride_u, stride_v), kBufferAlignment);
+ m_rgb24 = (uint8_t*)AlignedMalloc(RGB24DataSize(m_out_cap_height, m_out_cap_width, (m_out_cap_width + 1) / 2, (m_out_cap_width + 1) / 2), kBufferAlignment);

- int opt_stride_y = m_dest_cap_width;
- int opt_stride_u = (m_dest_cap_width + 1) / 2;
- int opt_stride_v = (m_dest_cap_width + 1) / 2;
- m_opti420 = (uint8_t*)AlignedMalloc(I420DataSize(m_dest_cap_height, opt_stride_y, opt_stride_u, opt_stride_v), kBufferAlignment);
+ int opt_stride_y = m_out_cap_width;
+ int opt_stride_u = (m_out_cap_width + 1) / 2;
+ int opt_stride_v = (m_out_cap_width + 1) / 2;
+ m_opti420 = (uint8_t*)AlignedMalloc(I420DataSize(m_out_cap_height, opt_stride_y, opt_stride_u, opt_stride_v), kBufferAlignment);

 if (m_i420 && m_rgb24 && m_opti420){
 bret = true;
@@ -966,11 +905,6 @@ bool VideoCaptureImpl::VideoCaptureStarted()
 return m_bCaptureStarted;
 }

-ICaptureCallback* VideoCaptureImpl::GetCaptureCallback()
-{
- return m_callback;
-}
-
 int VideoCaptureImpl::GetCaptureVideoFd()
 {
 return m_deviceFd;
@@ -984,12 +918,12 @@ VideoType VideoCaptureImpl::GetCaptureVideoType()

 int VideoCaptureImpl::GetCapture_Width()
 {
- return m_capture_width;
+ return m_real_cap_width;
 }

 int VideoCaptureImpl::GetCapture_Height()
 {
- return m_capture_height;
+ return m_real_cap_height;
 }

 bool VideoCaptureImpl::GetStopCaptureFlag()
@@ -1005,14 +939,10 @@ int VideoCaptureImpl::StopVideoCapture()

 if (0 == pthread_join(m_CaptureThreadId, NULL)) {
 m_CaptureThreadId = 0;
- if (m_callback){
- m_callback->Debug("thread join video capture thread success.");
- }
+ CapLog("thread join video capture thread success.");
 }
 else {
- if (m_callback){
- m_callback->Debug("thread join video capture thread failed for %s.", strerror(errno));
- }
+ CapLog("thread join video capture thread failed for %s.", strerror(errno));
 }

 DeAllocateVideoBuffers();
@@ -1020,9 +950,7 @@ int VideoCaptureImpl::StopVideoCapture()
 close(m_deviceFd);
 m_deviceFd = -1;

- if (m_callback) {
- m_callback->Debug("video capture has stopped!");
- }
+ CapLog("video capture has stopped!");
 }

 return 0;
@@ -1040,9 +968,7 @@ int VideoCaptureImpl::GetCamBrightness(int* ibright)
 struct v4l2_control ctrl;
 ctrl.id = V4L2_CID_BRIGHTNESS;
 if (ioctl(m_deviceFd,VIDIOC_G_CTRL,&ctrl) == -1){
- if (m_callback){
- m_callback->Debug("VIDIOC_S_CTRL get V4L2_CID_BRIGHTNESS error for %s", strerror(errno));
- }
+ CapLog("VIDIOC_S_CTRL get V4L2_CID_BRIGHTNESS error for %s", strerror(errno));
 }
 else {
 //m_callback->Debug("VIDIOC_S_CTRL get V4L2_CID_BRIGHTNESS success and brightness is %d", ctrl.value);
@@ -1060,9 +986,7 @@ int VideoCaptureImpl::SetCamBrightness(int ibright)
 ctrl.id = V4L2_CID_BRIGHTNESS;
 ctrl.value = TransToRealBrightnessValue(ibright);
 if (ioctl(m_deviceFd, VIDIOC_S_CTRL, &ctrl) == -1){
- if (m_callback){
- m_callback->Debug("VIDIOC_S_CTRL set V4L2_CID_BRIGHTNESS error for %s", strerror(errno));
- }
+ CapLog("VIDIOC_S_CTRL set V4L2_CID_BRIGHTNESS error for %s", strerror(errno));
 }
 else{
 //m_callback->Debug("VIDIOC_S_CTRL set V4L2_CID_BRIGHTNESS success %d", ctrl.value);
@@ -1099,14 +1023,10 @@ bool VideoCaptureImpl::GetCamBrightnessInfo()
 struct v4l2_queryctrl qctrl;
 qctrl.id = V4L2_CID_BRIGHTNESS;
 if (ioctl(m_deviceFd, VIDIOC_QUERYCTRL, &qctrl) == -1) {
- if (m_callback){
- m_callback->Debug("VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS error for %s", strerror(errno));
- }
+ CapLog("VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS error for %s", strerror(errno));
 }
 else {
- if (m_callback){
- m_callback->Debug("VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS success {min(%d) - max(%d)},default is %d", qctrl.minimum, qctrl.maximum, qctrl.default_value);
- }
+ CapLog("VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS success {min(%d) - max(%d)},default is %d", qctrl.minimum, qctrl.maximum, qctrl.default_value);
 m_idefaultbrightness = qctrl.default_value;
 m_iminbrightness = qctrl.minimum;
 m_imaxbrightness = qctrl.maximum;
@@ -1180,3 +1100,13 @@ libyuv::RotationMode VideoCaptureImpl::RotateTrans(int irotate)
 return rotation_mode;
 }

+
+void VideoCaptureImpl::CapLog(const char* fmt, ...)
+{
+ if (m_callback.debug) {
+ va_list arg;
+ va_start(arg, fmt);
+ (*m_callback.debug)(m_callback.user_data, fmt, arg);
+ va_end(arg);
+ }
+}
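
Note on the new logging path: videocap_callback_t is defined outside this diff, so its exact layout is not shown here. The CapLog() wrapper added above forwards a va_list to m_callback.debug, which only works if that member has a vprintf-style signature. The sketch below is a hypothetical illustration of that assumed shape (only the member names user_data and debug come from the diff), not the actual header.

#include <stdarg.h>
#include <stdio.h>

// Assumed shape of the callback struct consumed by VideoCaptureImpl::CapLog().
typedef struct videocap_callback {
    void* user_data;                                               // opaque pointer handed back to the hook
    void (*debug)(void* user_data, const char* fmt, va_list args); // vprintf-style, so a va_list can be forwarded
} videocap_callback_t;

// Example hook a caller might install: write the formatted message to stderr.
static void stderr_debug(void* user_data, const char* fmt, va_list args)
{
    (void)user_data;
    vfprintf(stderr, fmt, args);
    fputc('\n', stderr);
}

If debug were instead declared variadic, i.e. void (*debug)(void*, const char*, ...), passing the va_list as a single argument would not expand the format arguments correctly, and the hook would need a separate va_list-taking variant.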