#include "stdafx.h" #include #include #include #include #include #include "y2k_time.h" #include "rec_common.h" #include "videohorflip.h" #include "Event.h" #include "capture.h" using namespace CameraConfigManage; #define av_always_inline __inline #define inline __inline static int Bin2Str(unsigned char *x, int xlen, char *str, int str_size) { static const char *hex2char = "0123456789ABCDEF"; int i, k = 0; if (str_size <= xlen * 2) return -1; for (i = 0; i < xlen; ++i) { int h = x[i] >> 4; int l = x[i] & 0xf; str[k++] = hex2char[h]; str[k++] = hex2char[l]; } str[k] = 0; return k; } static int translate_id(int in_direction, int idx) { int i, n, ii; n = Pa_GetDeviceCount(); for (i = 0, ii = 0; i < n; ++i) { const PaDeviceInfo *info = Pa_GetDeviceInfo(i); if (in_direction) { if (info->maxInputChannels) { if (ii == idx) { return i; } ii++; } } else { if (info->maxOutputChannels) { if (ii == idx) { return i; } ii++; } } } return -1; } static int StreamCallback(const void *input, void *output, unsigned long frameCount, const PaStreamCallbackTimeInfo* timeInfo, PaStreamCallbackFlags statusFlags, void *userData) { audio_capture_t *audio_cap = (audio_capture_t*)userData; if (input) { audio_frame frm; frm.bitspersample = 16; frm.format = 1; frm.data = (char*)const_cast(input); frm.framesize = frameCount << 1; frm.nchannels = 1; frm.samplespersec = CAPTURE_CLOCK; if (!audio_cap->shm_queue->InsertAudio(&frm)) { Dbg("Insert audio for video echo failed! frameCount:%d", frameCount); } } if (output) { memset(output, 0, frameCount<<1); } return paContinue; } static audio_capture_t *audio_capture_create(capture_t *cap) { audio_capture_t *audio_cap = ZALLOC_T(audio_capture_t); if (audio_cap) { audio_cap->parent = cap; audio_cap->shm_queue = new Clibaudioqueue(REC_COMMON_AUDIO_SHM_QUEUE); } return audio_cap; } static void audio_capture_destroy(audio_capture_t *audio_cap) { delete audio_cap->shm_queue; free(audio_cap); } static int audio_capture_start(audio_capture_t *audio_cap) { capture_t *cap = audio_cap->parent; PaStreamParameters inParam = {0}; PaStreamParameters outParam = {0}; PaError paError; const PaDeviceInfo *info; int nId = capture_get_audio_device_id(true, cap->config.strAudioIn); if (nId == -1) { //需要立即处理的告警使用Severity_High //LogWarn(Severity_High,Error_DevMedia,ERROR_MOD_MEDIACONTROLLER_HANDFREEIN_INITFAIL,"handfree in device config error,please check"); return Error_AudioIN; } int in_dev_id = translate_id(TRUE, nId); if (in_dev_id < 0) { Dbg("audio in dev translate failed!"); return Error_AudioIN; } info = Pa_GetDeviceInfo(in_dev_id); if (!info) { Dbg("get device info failed!"); return Error_AudioIN; } inParam.channelCount = 1; inParam.device = in_dev_id; inParam.suggestedLatency = info->defaultLowInputLatency; inParam.sampleFormat = paInt16; inParam.hostApiSpecificStreamInfo = NULL; if (Pa_IsFormatSupported(&inParam, NULL, CAPTURE_CLOCK) != paNoError) { Dbg("audio capture create error, cannot open audio input device"); return Error_AudioIN; } //打开流设备,可以用以下代码替换paError = Pa_OpenStream(&audio_cap->stream, &inParam, &outParam, CAPTURE_CLOCK, //CAPTURE_FRAME_TIME * CAPTURE_CLOCK/1000, paClipOff|paDitherOff, &StreamCallback, audio_cap); paError = Pa_OpenStream(&audio_cap->stream, &inParam, NULL, CAPTURE_CLOCK, CAPTURE_FRAME_TIME * CAPTURE_CLOCK/1000, paClipOff|paDitherOff, &StreamCallback, audio_cap); if (paError != paNoError) { Dbg("portaudio open stream failed! 
paError = %d", paError); return Error_AudioIN; } paError = Pa_StartStream(audio_cap->stream); if (paError != paNoError) { Dbg("portaudio start stream failed! paError = %d", paError); return Error_AudioIN; } return Error_Succeed; } static void audio_capture_stop(audio_capture_t *audio_cap) { if (audio_cap->stream) { Pa_AbortStream(audio_cap->stream); Pa_CloseStream(audio_cap->stream); audio_cap->stream = NULL; } } static int calc_capture_mode(int width, int height, int *mode) { const struct { int mode; int width; int height; } modes [] = { {VIDEOCAP_FRAME_SQCIF, VIDEOCAP_SQCIF_WIDTH, VIDEOCAP_SQCIF_HEIGHT}, {VIDEOCAP_FRAME_QQVGA, VIDEOCAP_QQVGA_WIDTH, VIDEOCAP_QQVGA_HEIGHT}, {VIDEOCAP_FRAME_QCIF, VIDEOCAP_QCIF_WIDTH, VIDEOCAP_QCIF_HEIGHT}, {VIDEOCAP_FRAME_QVGA, VIDEOCAP_QVGA_WIDTH, VIDEOCAP_QVGA_HEIGHT}, {VIDEOCAP_FRAME_CIF, VIDEOCAP_CIF_WIDTH, VIDEOCAP_CIF_HEIGHT}, {VIDEOCAP_FRAME_VGA, VIDEOCAP_VGA_WIDTH, VIDEOCAP_VGA_HEIGHT}, {VIDEOCAP_FRAME_4CIF, VIDEOCAP_4CIF_WIDTH, VIDEOCAP_4CIF_HEIGHT}, {VIDEOCAP_FRAME_SVGA, VIDEOCAP_SVGA_WIDTH, VIDEOCAP_SVGA_HEIGHT}, {VIDEOCAP_FRAME_NHD, VIDEOCAP_NHD_WIDTH, VIDEOCAP_NHD_HEIGHT}, {VIDEOCAP_FRAME_SXGA, VIDEOCAP_SXGA_WIDTH, VIDEOCAP_SXGA_HEIGHT}, {VIDEOCAP_FRAME_720P, VIDEOCAP_720P_WIDTH, VIDEOCAP_720P_HEIGHT}, {VIDEOCAP_FRAME_1080P, VIDEOCAP_1080P_WIDTH, VIDEOCAP_1080P_HEIGHT}, }; int i; for (i = 0; i < array_size(modes); ++i) { if (modes[i].width == width && modes[i].height == height) { *mode = modes[i].mode; return 0; } } return Error_NotExist; } static int video_shm_enqueue(Clibvideoqueue *shm_queue, video_frame *frame, int flags) { videoq_frame tmp_frm; tmp_frm.data = frame->data[0]; tmp_frm.framesize = frame->width * frame->height * 3; tmp_frm.format = VIDEOQ_FORMAT_RGB24; tmp_frm.width = frame->width; tmp_frm.height = frame->height; unsigned int nowtime = y2k_time_now(); if (!shm_queue->InsertVideo(&tmp_frm, flags,nowtime)) { Dbg("caution: insert shm video failed!"); return Error_Unexpect; } else { //Dbg("insert shm video ok!"); return Error_Succeed; } } static void cap_on_frame(void *user_data, video_frame *frame) { video_capture_t *video_cap = (video_capture_t *)user_data; capture_t *cap = video_cap->parent; int rc; int flip = -1; if (cap->config.video_rotate == 0){ flip = 0; } else if (cap->config.video_rotate == 180){ flip = (VIDEOQUEUE_FLAG_VERTICAL_FLIP|VIDEOQUEUE_FLAG_HORIZONTAL_FLIP); } else { return; } video_cap->frame_id++; //Dbg("start echo on frame, id=%d, tick=%d", video_cap->frame_id, GetTickCount()); /*IplImage*img = NULL; img = cvCreateImage(cvSize(frame->width,frame->height),IPL_DEPTH_8U,3); img->imageData = (char*)frame->data[0]; cvSaveImage("c:\\echo.jpg", img,0); cvReleaseImageHeader(&img);*/ //// snapshot //if (rc==Error_Succeed) //{ // if (*cap->config.ref_capture_count) // { // Dbg("echo camera ref_env_capture_count=%d",*cap->config.ref_capture_count); // InterlockedDecrement(cap->config.ref_capture_count); // LogEvent(Severity_Middle, MOD_EVENT_MEDIACONTROLLER_FINISHED_CAPTURE_ENV, "agent capture env ok, and capture env finished!"); // } //} // preview 320x240 { video_frame preview_frame; video_frame_alloc(REC_COMMON_VIDEO_PREVIEW_WIDTH, REC_COMMON_VIDEO_PREVIEW_HEIGHT, VIDEO_FORMAT_RGB24, &preview_frame); memset(preview_frame.data[0], 0, preview_frame.height*preview_frame.linesize[0]); uint8_t *dst_data[4] = {preview_frame.data[0] + 30 * preview_frame.linesize[0], 0, 0, 0}; // 320x180 paste to 320x240 sws_scale(video_cap->preview_sws_ctx, frame->data, frame->linesize, 0, frame->height, dst_data, preview_frame.linesize); 
        video_shm_enqueue(video_cap->preview_shm_queue, &preview_frame, flip);
        video_frame_free(&preview_frame);
    }
    //Dbg("end echo on frame, id=%d, tick=%d", video_cap->frame_id, GetTickCount());
}

static video_capture_t *video_capture_create(capture_t *cap, int camera_type)
{
    video_capture_t *video_cap = ZALLOC_T(video_capture_t);
    if (video_cap) {
        video_cap->parent = cap;
        video_cap->camera_type = camera_type;
        video_cap->frame_id = 0;
        if (camera_type == CAMERA_TYPE_ENV) {
            // need to be edited
            video_cap->preview_shm_queue = new Clibvideoqueue(REC_COMMON_VIDEO_ENV_SHM_PREVIEW_QUEUE);
            video_cap->preview_sws_ctx = sws_getContext(REC_COMMON_VIDEO_SNAPSHOT_WIDTH, REC_COMMON_VIDEO_SNAPSHOT_HEIGHT, PIX_FMT_BGR24,
                                                        REC_COMMON_VIDEO_RTP_ENV_WIDTH, REC_COMMON_VIDEO_RTP_ENV_HEIGHT, PIX_FMT_BGR24,
                                                        SWS_FAST_BILINEAR, NULL, NULL, NULL);
        }
    }
    return video_cap;
}

static void video_capture_destroy(video_capture_t *video_cap)
{
    if (video_cap) {
        if (video_cap->preview_sws_ctx) {
            sws_freeContext(video_cap->preview_sws_ctx);
            video_cap->preview_sws_ctx = NULL;
        }
        if (video_cap->preview_shm_queue) {
            delete video_cap->preview_shm_queue;
            video_cap->preview_shm_queue = NULL;
        }
        free(video_cap);
    }
}

static int video_capture_start(video_capture_t *video_cap)
{
    capture_config_t *conf = &video_cap->parent->config;
    int dev_id;
    if (video_cap->camera_type == CAMERA_TYPE_ENV) {
        capture_camera_t tmp;
        dev_id = capture_get_video_device_id(conf->strVideo, tmp);
        if (dev_id == -1) {
            Dbg("No echo camera,please check config file or device!");
            return -1;
        }
    }

    videocap_param param = {0};
    int cap_mode;
    int rc = -1;
    rc = calc_capture_mode(REC_COMMON_VIDEO_SNAPSHOT_WIDTH, REC_COMMON_VIDEO_SNAPSHOT_HEIGHT, &cap_mode);
    if (rc != 0) {
        Dbg("calc cap_mode failed!");
        return rc;
    }
    param.cap_mode = cap_mode;
    param.dev_id = dev_id;
    param.frame_fmt = VIDEO_FORMAT_RGB24;
    if ((ePadtype == g_eDeviceType) || (eDesk2SType == g_eDeviceType)) {
        param.fps = REC_COMMON_VIDEO_FPS_MOBILE;
    } else {
        param.fps = REC_COMMON_VIDEO_RAW_FPS;
    }
    param.on_frame = &cap_on_frame;
    param.user_data = video_cap;
    param.option = 0;
    rc = videocap_create(&video_cap->cap, &param);
    if (rc != 0) {
        Dbg("videocap create failed!");
        return rc;
    }
    rc = videocap_start(video_cap->cap);
    if (rc != 0) {
        Dbg("videocap start failed!");
        videocap_destroy(video_cap->cap);
        video_cap->cap = NULL;
        return rc;
    }
    return 0;
}

static void video_capture_stop(video_capture_t *video_cap)
{
    if (video_cap->cap) {
        videocap_stop(video_cap->cap);
        videocap_destroy(video_cap->cap);
        video_cap->cap = NULL;
    }
}

static int audio_get_dev_count(int *in_cnt, int *out_cnt)
{
    int icnt = 0, ocnt = 0;
    int cnt = Pa_GetDeviceCount();
    for (int i = 0; i < cnt; ++i) {
        const PaDeviceInfo *info = Pa_GetDeviceInfo(i);
        if (info->maxInputChannels) icnt++;
        if (info->maxOutputChannels) ocnt++;
    }
    if (in_cnt) *in_cnt = icnt;
    if (out_cnt) *out_cnt = ocnt;
    return 0;
}

static CSimpleStringA audio_get_dev_name(bool in_direction, int idx)
{
    int cnt = Pa_GetDeviceCount();
    int ii, i;
    for (i = 0, ii = 0; i < cnt; ++i) {
        const PaDeviceInfo *info = Pa_GetDeviceInfo(i);
        if (in_direction) {
            if (info->maxInputChannels) {
                if (idx == ii) {
                    return CSimpleStringA(info->name);
                }
                ii++;
            }
        } else {
            if (info->maxOutputChannels) {
                if (idx == ii) {
                    return CSimpleStringA(info->name);
                }
                ii++;
            }
        }
    }
    return CSimpleStringA();
}

namespace CameraConfigManage {

DeviceTypeEnum g_eDeviceType;

int capture_create( const capture_config_t *config, capture_t **p_cap )
{
    capture_t *cap = ZALLOC_T(capture_t);
    cap->audio = NULL;
    cap->video = NULL;
    memcpy(&cap->config, config, sizeof(capture_config_t));
    cap->audio = audio_capture_create(cap);
    if (!cap->audio) {
        Dbg("create audio capture object failed!");
        return Error_Unexpect;
    }
    capture_camera_t tmp;
    int dev_id = capture_get_video_device_id(config->strVideo, tmp);
    if (dev_id != -1) {
        cap->video = video_capture_create(cap, CAMERA_TYPE_ENV);
        if (!cap->video) {
            Dbg("create echo video object failed!");
            return Error_Unexpect;
        }
    }
    if (dev_id == -1) {
        Dbg("echo camera deviceid error!");
        capture_destroy(cap);
        return Error_Unexpect;
    } else {
        *p_cap = cap;
        return 0;
    }
}

// Used when restarting the camera
ErrorCodeEnum capture_create( const capture_config_t *config, capture_t *cap )
{
    capture_camera_t tmp;
    int dev_id = capture_get_video_device_id(config->strVideo, tmp);
    if ((dev_id != -1) && (cap->video == NULL)) {
        cap->video = video_capture_create(cap, CAMERA_TYPE_ENV);
        if (!cap->video) {
            Dbg("create echo video object failed!");
            return Error_Unexpect;
        }
    } else {
        return Error_Unexpect;
    }
    return Error_Succeed;
}

void capture_destroy( capture_t *cap )
{
    if (cap) {
        if (cap->video) {
            video_capture_destroy(cap->video);
            cap->video = NULL;
        }
        if (cap->audio) {
            audio_capture_destroy(cap->audio);
            cap->audio = NULL;
        }
        free(cap);
    }
}

ErrorCodeEnum capture_start( capture_t *cap )
{
    int rc = 0;
    if (cap->audio) {
        rc = audio_capture_start(cap->audio);
        if (rc != Error_Succeed) {
            ErrorCodeEnum rslt = (ErrorCodeEnum)rc;
            if (rslt == Error_AudioIN) {
                Dbg("start audio In object failed! rc:%d", rc);
                //LogWarn(Severity_High,Error_NotInit,ERROR_MOD_MEDIACONTROLLER_HANDFREE_OPENFAIL,"open audio device fail,please check device");
            }
            return rslt;
        }
    } else {
        Dbg("start echo audio Error_Unexpect");
        return Error_Unexpect;
    }
    if (cap->video) {
        rc = video_capture_start(cap->video);
        if (rc != Error_Succeed) {
            Dbg("start echo video capture object failed! rc:%d", rc);
            //LogError(Severity_Middle,Error_NotInit,ERROR_MOD_MEDIACONTROLLER_ENVCAM_OPEN,"open echo camera fail,please check device");
            return Error_EnvCamera;
        }
    } else {
        Dbg("start echo video Error_Unexpect");
        return Error_Unexpect;
    }
    return (ErrorCodeEnum)rc;
}

void capture_stop( capture_t *cap )
{
    if (cap->audio) {
        audio_capture_stop(cap->audio);
    }
    if (cap->video) {
        video_capture_stop(cap->video);
    }
}

int capture_detect_camera_bug( capture_t *cap, int *n )
{
    *n = 0;
    if (cap->video) {
        if (cap->video->preview_shm_queue) {
            *n = cap->video->preview_shm_queue->GetVideoLens();
        }
    } else {
        *n = -1;
    }
    return 0;
}

int capture_get_last_frametime( capture_t *cap, DWORD *n )
{
    *n = 0;
    if (cap->video) {
        if (cap->video->preview_shm_queue) {
            *n = cap->video->preview_shm_queue->GetLastFrameTime();
        }
    } else {
        *n = 0;
    }
    return 0;
}

int capture_lib_init()
{
    HRESULT hr = CoInitialize(NULL);
    int rc;
    {
        // Set the MSVCR100 CRT locale to Simplified Chinese ("chs").
        HMODULE hModule = GetModuleHandleA("MSVCR100.dll");
        if (hModule) {
            typedef char *(*f_setlocale)(int, const char*);
            f_setlocale f = (f_setlocale)GetProcAddress(hModule, "setlocale");
            (*f)(LC_ALL, "chs");
        }
    }
    if (SUCCEEDED(hr)) {
        PaError Error;
        Error = Pa_Initialize();
        if (Error == paNoError) {
            rc = videoframework_init();
            if (rc != 0) {
                Dbg("videoframework_init failed, rc=%d", rc);
                return Error_Resource;
            }
        } else {
            Dbg("PaInitialize failed, rc=%d", Error);
            return Error_Resource;
        }
    } else {
        Dbg("CoInitialize failed! hr:%d", hr);
hr:%d", hr); return Error_Resource; } { int i, n; n = videocap_get_device_count(); for (i = 0; i < n; ++i) { WCHAR tmp[256]; char t[256]; WCHAR tmp1[256]; char t1[256]; videocap_get_device_name(i, tmp, ARRAYSIZE(tmp)); WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL); videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1)); WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL); capture_md5(t1); Dbg("%d = %s;%s", i, t, t1); } } { int icnt, ocnt; rc = audio_get_dev_count(&icnt, &ocnt); if (rc == 0) { int i; Dbg("audio input devices(%d):", icnt); for (i = 0; i < icnt; ++i) { CSimpleStringA str = audio_get_dev_name(true, i); Dbg("%d = %s", i, (LPCSTR)str); } Dbg("audio output devices(%d):", ocnt); for (i = 0; i < ocnt; ++i) { CSimpleStringA str = audio_get_dev_name(false, i); Dbg("%d = %s", i, (LPCSTR)str); } } } return Error_Succeed; } void capture_lib_term() { videoframework_term(); Pa_Terminate(); CoUninitialize(); } int capture_get_audio_device_id( bool in_direction, const char *dev_name ) { int cnt = Pa_GetDeviceCount(); int ii, i; for (i = 0, ii = 0; i < cnt; ++i) { const PaDeviceInfo *info = Pa_GetDeviceInfo(i); if (in_direction) { if (info->maxInputChannels) { if (strstr(info->name, dev_name) != NULL) { return ii; } ii++; } } else { if (info->maxOutputChannels) { if (strstr(info->name, dev_name) != NULL) { return ii; } ii++; } } } return -1; } //int capture_get_video_device_id( const char *dev_name ) //{ // int i, n; // // n = videocap_get_device_count(); // for (i = 0; i < n; ++i) { // WCHAR tmp[256]; // char t[256]; // WCHAR tmp1[256]; // char t1[256]; // videocap_get_device_name(i, tmp, ARRAYSIZE(tmp)); // WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL); // videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1)); // WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL); // capture_md5(t1); // strcat(t, ";"); // strcat(t, t1); // if (strcmp(dev_name, t) == 0) // return i; // } // return -1; // not found //} int capture_get_video_device_id(const char *dev_name, capture_camera_t &camera) { int i, n; n = videocap_get_device_count(); for (i = 0; i < n; ++i) { WCHAR tmp[256]; char t[256]; WCHAR tmp1[256]; char t1[256]; videocap_get_device_name(i, tmp, ARRAYSIZE(tmp)); WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL); videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1)); WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL); // save DevicePath (add by ly at 20160725) char t2[256]; strcpy(t2,t1); for (int j = 0; j < strlen(t2); ++j) { t2[j] = toupper(t2[j]); if (t2[j] == '#') t2[j] = '\\'; } capture_md5(t1); if (dev_name != NULL && strlen(dev_name) > 1 && strstr(dev_name, ";") == NULL) // 外接摄像头 add by ly at 20160531 { if (strstr(t2,dev_name) != NULL) // 判断外接摄像头DeviceLocationPaths是否是DevicePath的子串 { camera.strFriendlyName = t; camera.strDevPath = t2; camera.strMd5Val = t1; return i; } if (strcmp(dev_name, t) == 0) // 如果是直接用友好名称查询(适用于高拍仪) add by ly 2017/11/08 return i; } else { char t3[256]; strcpy(t3,t); strcat(t3, ";"); strcat(t3, t1); if (strcmp(dev_name, t3) == 0){ camera.strFriendlyName = t; camera.strDevPath = t2; camera.strMd5Val = t1; return i; } } } return -1; // not found } bool capture_check_video_device_match( const char *dev_name, const char*dev_inst_path ) { int i, n; n = videocap_get_device_count(); for (i = 0; i < n; ++i) { WCHAR tmp[256]; char t[256]; WCHAR tmp1[256]; char t1[256]; videocap_get_device_name(i, tmp, ARRAYSIZE(tmp)); WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL); 
        videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1));
        WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL);
        // save DevicePath (add by ly at 20160725)
        char t2[256];
        strcpy(t2, t1);
        for (int j = 0; j < strlen(t2); ++j) {
            t2[j] = toupper(t2[j]);
            if (t2[j] == '#') t2[j] = '\\';
        }
        capture_md5(t1);
        strcat(t, ";");
        strcat(t, t1);
        if (strstr(t2, dev_inst_path) != NULL) {
            Dbg("[dbg] %s found in %d cameras.", dev_inst_path, n);
            if (strcmp(dev_name, t) == 0)
                return true;
        }
    }
    return false; // not match
}

bool capture_adj_brightness( capture_t *cap, int nvalue, ErrorCodeEnum nCode )
{
    HRESULT rst = S_OK;
    if (cap->video && (nCode != Error_EnvCamera) && (nCode != Error_AllCamera)) {
        rst = videocap_adj_brightness(cap->video->cap, nvalue);
    }
    if (SUCCEEDED(rst))
        return true;
    return false;
}

bool capture_set_autobrightness( capture_t *cap, ErrorCodeEnum nCode )
{
    HRESULT rst = S_OK;
    if (cap->video && (nCode != Error_EnvCamera) && (nCode != Error_AllCamera)) {
        rst = videocap_set_autobrightness(cap->video->cap);
    }
    if (SUCCEEDED(rst))
        return true;
    return false;
}

int capture_get_brightness( capture_t *cap, ErrorCodeEnum nCode )
{
    int nValue = 0;
    HRESULT rst = S_OK;
    if (cap->video && (nCode != Error_EnvCamera) && (nCode != Error_AllCamera)) {
        rst = videocap_get_brightness(cap->video->cap, &nValue);
    } else {
        return -1;
    }
    if (SUCCEEDED(rst)) {
        return nValue;
    } else {
        return -1;
    }
}

int stop_camera( capture_t *cap )
{
    if (cap->video) {
        video_capture_stop(cap->video);
        video_capture_destroy(cap->video);
        cap->video = NULL;
        return 0;
    } else {
        return -1;
    }
}

// NOTE: the element types of the std::vector/CAutoArray parameters below were stripped
// in this copy; capture_camera_t and CSimpleStringA are reconstructed from how the
// containers are used in these functions.
void capture_enum_cameras( std::vector<capture_camera_t> &cams, CAutoArray<CSimpleStringA> &hspcams )
{
    int i, n;
    cams.clear();
    n = videocap_get_device_count();
    if (n <= 0)
        return;
    int m = hspcams.GetCount(); // number of document cameras (high-speed scanners)
    for (i = 0; i < n; ++i) {
        WCHAR tmp[256];
        char t[256];
        WCHAR tmp1[256];
        char t1[256], t2[256];
        videocap_get_device_name(i, tmp, ARRAYSIZE(tmp));
        WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL);
        // Check whether this is a document camera; if so, skip to the next camera
        bool isHspCam = false;
        for (int k = 0; k < m; ++k) {
            if (!stricmp((LPCTSTR)hspcams[k], t)) {
                isHspCam = true;
                break;
            }
        }
        if (isHspCam) {
            continue;
        }
        capture_camera_t camera;
        videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1));
        WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL);
        // save DevicePath
        strcpy(t2, t1);
        for (int j = 0; j < strlen(t2); ++j) {
            t2[j] = toupper(t2[j]);
            if (t2[j] == '#') t2[j] = '\\';
        }
        capture_md5(t1);
        camera.strFriendlyName = t;
        camera.strDevPath = t2;
        camera.strMd5Val = t1;
        cams.push_back(camera);
    }
}

// Replace dev_path in place with the hex MD5 digest of the original path string.
void capture_md5( char *dev_path )
{
    if (dev_path && strlen(dev_path) > 0) {
        unsigned char x[MD5_DIGESTSIZE];
        md5_ctx_t ctx;
        md5_init(&ctx);
        md5(x, dev_path, strlen(dev_path));
        Bin2Str(x, sizeof(x), dev_path, strlen(dev_path));
    }
}

bool capture_get_only_video_device_by_friendlyname(const CSimpleStringA friendlyname,
                                                   CAutoArray<CSimpleStringA> &excludecams,
                                                   const CAutoArray<CSimpleStringA> &hspcams,
                                                   capture_camera_t &camera)
{
    int i, n;
    n = videocap_get_device_count();
    if (n <= 0)
        return false;
    int cnt = 0;
    int m = hspcams.GetCount(); // number of document cameras (high-speed scanners)
    for (i = 0; i < n; ++i) {
        WCHAR tmp[256];
        char t[256];
        WCHAR tmp1[256];
        char t1[256], t2[256];
        videocap_get_device_name(i, tmp, ARRAYSIZE(tmp));
        WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL);
        Dbg("[capture_get_only_video_device_by_friendlyname]%d, %s excludecams:%d.", i, t, excludecams.GetCount());
        // Check whether this is a document camera; if so, skip to the next camera
        bool isHspCam = false;
        for (int k = 0; k < m; ++k) {
            if (!stricmp((LPCTSTR)hspcams[k], t)) {
                isHspCam = true;
                break;
            }
        }
        if (isHspCam) {
            Dbg("[get_only_video_device_by_friendlyname]%d, %s isHspCam.", i, t);
isHspCam.", i, t); continue; } videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1)); WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL); // save DevicePath strcpy(t2, t1); for (int j = 0; j < strlen(t2); ++j) { t2[j] = toupper(t2[j]); if (t2[j] == '#') t2[j] = '\\'; } capture_md5(t1); // 检查是否为排除摄像头,若是则直接枚举下一个摄像头 bool isExcludeCam = false; for (int k = 0; k < excludecams.GetCount(); ++k) { if (!strcmp((LPCTSTR)excludecams[k], t1)) { isExcludeCam = true; break; } } if (isExcludeCam) { Dbg("[get_only_video_device_by_friendlyname]%d, %s Md5:%s isExcludeCam.", i, t, t1); continue; } if (strcmp((LPCTSTR)friendlyname, t) == 0) { if (0 == cnt) { camera.strFriendlyName = t; camera.strDevPath = t2; camera.strMd5Val = t1; } cnt++; Dbg("[get_only_video_device_by_friendlyname]%d, %s Md5:%s DevPath:%s isFoundCam.", i, t, t1, t2); } } if (1 != cnt) // 未找到唯一摄像头 { camera.strFriendlyName = "$"; camera.strDevPath = "$"; camera.strMd5Val = "$"; return false; // not found } return true; } bool capture_get_only_video_device(CAutoArray &excludecams, const CAutoArray &hspcams, capture_camera_t &camera) { int i, n; n = videocap_get_device_count(); if (n <= 0) return false; int cnt = 0; int m = hspcams.GetCount(); // 高拍仪的个数 for (i = 0; i < n; ++i) { WCHAR tmp[256]; char t[256]; WCHAR tmp1[256]; char t1[256], t2[256]; videocap_get_device_name(i, tmp, ARRAYSIZE(tmp)); WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL); Dbg("[capture_get_only_video_device]%d, %s excludecams:%d.", i, t, excludecams.GetCount()); // 检查是否为高拍仪摄像头,若是则直接枚举下一个摄像头 bool isHspCam = false; for (int k = 0; k < m; ++k) { if (!stricmp((LPCTSTR)hspcams[k], t)) { isHspCam = true; break; } } if (isHspCam) { Dbg("[capture_get_only_video_device]%d, %s isHspCam.", i, t); continue; } videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1)); WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL); // save DevicePath strcpy(t2, t1); for (int j = 0; j < strlen(t2); ++j) { t2[j] = toupper(t2[j]); if (t2[j] == '#') t2[j] = '\\'; } capture_md5(t1); // 检查是否为排除摄像头,若是则直接枚举下一个摄像头 bool isExcludeCam = false; for (int k = 0; k < excludecams.GetCount(); ++k) { if (!strcmp((LPCTSTR)excludecams[k], t1)) { isExcludeCam = true; break; } } if (isExcludeCam) { Dbg("[capture_get_only_video_device]%d, %s Md5:%s isExcludeCam.", i, t, t1); continue; } if (0 == cnt) { camera.strFriendlyName = t; camera.strDevPath = t2; camera.strMd5Val = t1; } cnt++; Dbg("[capture_get_only_video_device]%d, %s Md5:%s DevPath:%s isFoundCam.", i, t, t1, t2); } if (1 != cnt) // 未找到唯一摄像头 { camera.strFriendlyName = "$"; camera.strDevPath = "$"; camera.strMd5Val = "$"; return false; // not found } return true; } }