/*
 * ews_capture — EWS (external) camera + microphone capture for the
 * surveillance recorder.  Audio is pulled through a PortAudio input stream
 * and pushed into a shared-memory audio queue; video frames arrive via the
 * videoframework capture callback and are fanned out (snapshot / preview /
 * RTP / sales) into shared-memory video queues, scaled with libswscale.
 *
 * NOTE(review): the system-header names inside <...> were lost when this file
 * was extracted (each bare "#include" below originally named a header, e.g.
 * the Windows, CRT, PortAudio and ffmpeg headers).  They are left in place so
 * the include count/order is preserved -- TODO restore the names from VCS.
 */
#include "stdafx.h"
#include "SpBase.h"
#include
#include
#include
#include
#include
#include
#include "videoframework.h"
#include "y2k_time.h"
#include "libaudioqueue.h"
#include "libvideoqueue.h"
#include "rec_common.h"
#include "ews_capture.h"
#include "videohorflip.h"
#include "Event.h"
#include "EventCode.h"
#include
#include
#include

/* MSVC shims for the ffmpeg C headers (no C99 'inline' in old MSVC). */
#define av_always_inline __inline
#define inline __inline

#ifndef INT64_C
#define INT64_C(c) (c##LL)
/* BUGFIX: was (c##UL) -- 'unsigned long' is 32-bit on Win32; the standard
 * UINT64_C must produce an unsigned long long constant. */
#define UINT64_C(c) (c##ULL)
#endif

#include

extern "C" {
#include
#include
#include
}
#include "video_common/ffmpeg_api_cpp_adapter.h"

#define CAPTURE_FRAME_TIME 20 /* 20 ms per audio buffer */
#define CAPTURE_CLOCK 8000    /* legacy fixed sample rate (superseded by config) */

DeviceTypeEnum g_eDeviceType;

/* Audio half of the capture: one PortAudio input stream feeding the
 * surveillance-record shared-memory audio queue. */
typedef struct ews_audio_capture_t {
    PaStream *stream;
    Clibaudioqueue *shm_queue;
    ews_capture_t *parent;
} ews_audio_capture_t;

/* Video half of the capture: one camera, four shared-memory sinks and the
 * two swscale contexts used to downscale frames for preview/RTP. */
typedef struct ews_video_capture_t {
    videocap_t cap;
    Clibvideoqueue *snapshot_shm_queue;
    Clibvideoqueue *preview_shm_queue;
    Clibvideoqueue *rtp_shm_queue;
    Clibvideoqueue *sales_shm_queue;
    ews_capture_t *parent;
    int camera_type;  /* CAMERA_TYPE_xxx */
    int frame_id;     /* monotonically increasing frame counter (debug only) */
    struct SwsContext *preview_sws_ctx;
    struct SwsContext *rtp_sws_ctx;
} ews_video_capture_t;

struct ews_capture_t {
    ews_capture_config_t config;
    ews_audio_capture_t *audio;
    ews_video_capture_t *video;
};

/*
 * Render xlen bytes as upper-case hex into str (NUL-terminated).
 * Returns the string length written, or -1 if str_size is too small
 * (needs xlen*2 + 1 bytes).
 */
static int Bin2Str(unsigned char *x, int xlen, char *str, int str_size)
{
    static const char *hex2char = "0123456789ABCDEF";
    int i, k = 0;

    if (str_size <= xlen * 2)
        return -1;
    for (i = 0; i < xlen; ++i) {
        int h = x[i] >> 4;
        int l = x[i] & 0xf;
        str[k++] = hex2char[h];
        str[k++] = hex2char[l];
    }
    str[k] = 0;
    return k;
}

/*
 * Map an index counted over input-only (or output-only) devices to the
 * global PortAudio device index.  Returns -1 when idx is out of range.
 */
static int translate_id(int in_direction, int idx)
{
    int i, n, ii;

    n = Pa_GetDeviceCount();
    for (i = 0, ii = 0; i < n; ++i) {
        const PaDeviceInfo *info = Pa_GetDeviceInfo(i);
        if (!info)  /* defensive: Pa_GetDeviceInfo may return NULL */
            continue;
        if (in_direction) {
            if (info->maxInputChannels) {
                if (ii == idx)
                    return i;
                ii++;
            }
        } else {
            if (info->maxOutputChannels) {
                if (ii == idx)
                    return i;
                ii++;
            }
        }
    }
    return -1;
}

/*
 * PortAudio stream callback: forward captured paInt16 mono samples into the
 * shared-memory audio queue; zero-fill the (unused) output side if present.
 */
static int StreamCallback(const void *input, void *output, unsigned long frameCount,
                          const PaStreamCallbackTimeInfo *timeInfo,
                          PaStreamCallbackFlags statusFlags, void *userData)
{
    ews_audio_capture_t *audio_cap = (ews_audio_capture_t *)userData;

    /* BUGFIX: the original dereferenced audio_cap->parent before its own
     * NULL check on audio_cap; guard once up front instead. */
    if (audio_cap && input) {
        audio_frame frm;
        frm.bitspersample = 16;
        frm.format = 1;
        /* NOTE(review): extraction dropped the template argument of this
         * const_cast; <void*> is the only form consistent with the (char*)
         * cast in front of it -- confirm against VCS. */
        frm.data = (char *)const_cast<void *>(input);
        frm.framesize = frameCount << 1; /* paInt16 mono: 2 bytes per frame */
        frm.nchannels = 1;
        frm.samplespersec = audio_cap->parent->config.iaudiosamplerate;
        frm.iseriesnumber = 0;
        if (audio_cap->shm_queue) {
            if (!audio_cap->shm_queue->InsertAudio(&frm)) {
                /* BUGFIX: %lu for unsigned long (was %d) */
                Dbg("Insert audio for surveillance record failed! frameCount:%lu", frameCount);
            }
        }
    }
    if (output) {
        memset(output, 0, frameCount << 1); /* emit silence */
    }
    return paContinue;
}

/* Allocate the audio-capture object and attach its shared-memory queue.
 * Returns NULL on allocation failure. */
static ews_audio_capture_t *ews_audio_capture_create(ews_capture_t *cap)
{
    ews_audio_capture_t *audio_cap = ZALLOC_T(ews_audio_capture_t);
    if (audio_cap) {
        audio_cap->parent = cap;
        audio_cap->shm_queue = new Clibaudioqueue(REC_COMMON_AUDIO_EWS_SHM_QUEUE);
    }
    return audio_cap;
}

/* Free the audio-capture object (caller must have stopped the stream). */
static void ews_audio_capture_destroy(ews_audio_capture_t *audio_cap)
{
    if (!audio_cap)  /* defensive: tolerate NULL like free() */
        return;
    delete audio_cap->shm_queue;
    free(audio_cap);
}

/*
 * Resolve the configured input device, open a PortAudio input-only stream at
 * the configured sample rate and start it.
 * Returns Error_Succeed or Error_AudioIN.
 */
static int ews_audio_capture_start(ews_audio_capture_t *audio_cap)
{
    ews_capture_t *cap = audio_cap->parent;
    PaStreamParameters inParam = {0};
    PaError paError;
    const PaDeviceInfo *info;

    int nId = ews_capture_get_audio_device_id(true, cap->config.strAudioIn);
    if (nId == -1) {
        return Error_AudioIN;
    }
    int in_dev_id = translate_id(TRUE, nId);
    if (in_dev_id < 0) {
        Dbg("audio in dev translate failed!");
        return Error_AudioIN;
    }
    info = Pa_GetDeviceInfo(in_dev_id);
    if (!info) {
        Dbg("get device info failed!");
        return Error_AudioIN;
    }

    inParam.channelCount = 1;
    inParam.device = in_dev_id;
    inParam.suggestedLatency = info->defaultLowInputLatency;
    inParam.sampleFormat = paInt16;
    inParam.hostApiSpecificStreamInfo = NULL;

    int iAudioCaptureSampleRate = cap->config.iaudiosamplerate;
    if (Pa_IsFormatSupported(&inParam, NULL, iAudioCaptureSampleRate) != paNoError) {
        Dbg("audio capture create error, cannot open audio input device, and current capture sample rate is %d.",
            iAudioCaptureSampleRate);
        return Error_AudioIN;
    }

    /* Input-only stream; buffer size = CAPTURE_FRAME_TIME ms worth of frames. */
    paError = Pa_OpenStream(&audio_cap->stream, &inParam, NULL,
                            iAudioCaptureSampleRate,
                            CAPTURE_FRAME_TIME * iAudioCaptureSampleRate / 1000,
                            paClipOff | paDitherOff, &StreamCallback, audio_cap);
    if (paError != paNoError) {
        Dbg("portaudio open stream failed! paError = %d", paError);
        return Error_AudioIN;
    }
    paError = Pa_StartStream(audio_cap->stream);
    if (paError != paNoError) {
        Dbg("portaudio start stream failed! paError = %d", paError);
        return Error_AudioIN;
    }
    return Error_Succeed;
}

/* Abort and close the PortAudio stream, if open. */
static void ews_audio_capture_stop(ews_audio_capture_t *audio_cap)
{
    if (audio_cap->stream) {
        Pa_AbortStream(audio_cap->stream);
        Pa_CloseStream(audio_cap->stream);
        audio_cap->stream = NULL;
    }
}

/*
 * Map a width/height pair to the matching VIDEOCAP_FRAME_xxx mode.
 * Returns 0 and sets *mode on success, Error_NotExist otherwise.
 */
static int calc_capture_mode(int width, int height, int *mode)
{
    const struct {
        int mode;
        int width;
        int height;
    } modes[] = {
        {VIDEOCAP_FRAME_SQCIF, VIDEOCAP_SQCIF_WIDTH, VIDEOCAP_SQCIF_HEIGHT},
        {VIDEOCAP_FRAME_QQVGA, VIDEOCAP_QQVGA_WIDTH, VIDEOCAP_QQVGA_HEIGHT},
        {VIDEOCAP_FRAME_QCIF,  VIDEOCAP_QCIF_WIDTH,  VIDEOCAP_QCIF_HEIGHT},
        {VIDEOCAP_FRAME_QVGA,  VIDEOCAP_QVGA_WIDTH,  VIDEOCAP_QVGA_HEIGHT},
        {VIDEOCAP_FRAME_CIF,   VIDEOCAP_CIF_WIDTH,   VIDEOCAP_CIF_HEIGHT},
        {VIDEOCAP_FRAME_VGA,   VIDEOCAP_VGA_WIDTH,   VIDEOCAP_VGA_HEIGHT},
        {VIDEOCAP_FRAME_4CIF,  VIDEOCAP_4CIF_WIDTH,  VIDEOCAP_4CIF_HEIGHT},
        {VIDEOCAP_FRAME_SVGA,  VIDEOCAP_SVGA_WIDTH,  VIDEOCAP_SVGA_HEIGHT},
        {VIDEOCAP_FRAME_NHD,   VIDEOCAP_NHD_WIDTH,   VIDEOCAP_NHD_HEIGHT},
        {VIDEOCAP_FRAME_SXGA,  VIDEOCAP_SXGA_WIDTH,  VIDEOCAP_SXGA_HEIGHT},
        {VIDEOCAP_FRAME_720P,  VIDEOCAP_720P_WIDTH,  VIDEOCAP_720P_HEIGHT},
        {VIDEOCAP_FRAME_1080P, VIDEOCAP_1080P_WIDTH, VIDEOCAP_1080P_HEIGHT},
    };
    int i;

    for (i = 0; i < array_size(modes); ++i) {
        if (modes[i].width == width && modes[i].height == height) {
            *mode = modes[i].mode;
            return 0;
        }
    }
    return Error_NotExist;
}

/*
 * Push one RGB24 frame into a shared-memory video queue, stamped with the
 * current y2k time.  Returns Error_Succeed or Error_Unexpect.
 */
static int video_shm_enqueue(Clibvideoqueue *shm_queue, video_frame *frame, int flags)
{
    videoq_frame tmp_frm;
    tmp_frm.data = frame->data[0];
    tmp_frm.framesize = frame->width * frame->height * 3; /* RGB24: 3 bytes/pixel */
    tmp_frm.format = VIDEOQ_FORMAT_RGB24;
    tmp_frm.width = frame->width;
    tmp_frm.height = frame->height;

    unsigned int nowtime = y2k_time_now();
    if (!shm_queue->InsertVideo(&tmp_frm, flags, nowtime)) {
        Dbg("caution: insert shm video failed!");
        return Error_Unexpect;
    }
    return Error_Succeed;
}

/*
 * Per-frame capture callback: fan one camera frame out to the snapshot,
 * preview (320x240 letterboxed), RTP (320x180) and sales queues.
 * Frames are dropped entirely for rotations other than 0/180.
 */
static void ews_cap_on_frame(void *user_data, video_frame *frame)
{
    ews_video_capture_t *video_cap = (ews_video_capture_t *)user_data;
    ews_capture_t *cap = video_cap->parent;
    int rc;
    int flip = -1;

    if (cap->config.video_rotate == 0) {
        flip = 0;
    } else if (cap->config.video_rotate == 180) {
        flip = (VIDEOQUEUE_FLAG_VERTICAL_FLIP | VIDEOQUEUE_FLAG_HORIZONTAL_FLIP);
    } else {
        return; /* only 0 and 180 degrees supported */
    }

    video_cap->frame_id++;

    /* NOTE(review): the snapshot flags look asymmetric (vertical flip for
     * rotate==0, horizontal for rotate==180) -- preserved as-is, confirm the
     * intended orientation with the queue consumer. */
    rc = video_shm_enqueue(video_cap->snapshot_shm_queue, frame,
                           flip == 0 ? VIDEOQUEUE_FLAG_VERTICAL_FLIP
                                     : VIDEOQUEUE_FLAG_HORIZONTAL_FLIP);
    if (rc != Error_Succeed) {
        Dbg("ews snapshot queue enqueue shm failed! Error = %d, camera_type=%d",
            rc, video_cap->camera_type);
    }

    /* preview 320x240: scale to 320x180 and paste it centered vertically
     * ((240-180)/2 = 30 black rows above and below) */
    {
        video_frame preview_frame;
        video_frame_alloc(REC_COMMON_VIDEO_PREVIEW_WIDTH, REC_COMMON_VIDEO_PREVIEW_HEIGHT,
                          VIDEO_FORMAT_RGB24, &preview_frame);
        memset(preview_frame.data[0], 0,
               preview_frame.height * preview_frame.linesize[0]);
        uint8_t *dst_data[4] = {preview_frame.data[0] + 30 * preview_frame.linesize[0], 0, 0, 0};
        sws_scale(video_cap->preview_sws_ctx, frame->data, frame->linesize, 0,
                  frame->height, dst_data, preview_frame.linesize);
        video_shm_enqueue(video_cap->preview_shm_queue, &preview_frame, flip);
        video_frame_free(&preview_frame);
    }

    /* rtp 320x180: flip vertically (negative linesize trick) while scaling */
    {
        video_frame rtp_frame;
        video_frame_alloc(REC_COMMON_VIDEO_RTP_EWS_WIDTH, REC_COMMON_VIDEO_RTP_EWS_HEIGHT,
                          VIDEO_FORMAT_RGB24, &rtp_frame);
        uint8_t *src_data[4] = {frame->data[0] + (frame->height - 1) * frame->linesize[0], 0, 0, 0};
        int src_linesize[4] = {-frame->linesize[0], 0, 0, 0};
        sws_scale(video_cap->rtp_sws_ctx, src_data, src_linesize, 0, frame->height,
                  rtp_frame.data, rtp_frame.linesize);
        video_shm_enqueue(video_cap->rtp_shm_queue, &rtp_frame, flip);
        video_shm_enqueue(video_cap->sales_shm_queue, &rtp_frame, flip);
        video_frame_free(&rtp_frame);
    }
}

/*
 * Allocate the video-capture object: queues and swscale contexts for the
 * given camera type (only CAMERA_TYPE_EWS is wired up).
 * Returns NULL on allocation failure.
 */
static ews_video_capture_t *ews_video_capture_create(ews_capture_t *cap, int camera_type)
{
    ews_video_capture_t *video_cap = ZALLOC_T(ews_video_capture_t);
    if (video_cap) {
        video_cap->parent = cap;
        video_cap->camera_type = camera_type;
        video_cap->frame_id = 0;
        if (camera_type == CAMERA_TYPE_EWS) {
            video_cap->snapshot_shm_queue = new Clibvideoqueue(REC_COMMON_VIDEO_EWS_SHM_SNAPSHOT_QUEUE);
            video_cap->rtp_shm_queue = new Clibvideoqueue(REC_COMMON_VIDEO_EWS_SHM_RTP_QUEUE);
            video_cap->sales_shm_queue = new Clibvideoqueue(REC_COMMON_VIDEO_SALES_EWS_SHM_RTP_QUEUE);
            /* fast nearest-neighbour scaler for the RTP stream */
            video_cap->rtp_sws_ctx = sws_getContext(
                REC_COMMON_VIDEO_SNAPSHOT_WIDTH, REC_COMMON_VIDEO_SNAPSHOT_HEIGHT, PIX_FMT_BGR24,
                REC_COMMON_VIDEO_RTP_EWS_WIDTH, REC_COMMON_VIDEO_RTP_EWS_HEIGHT, PIX_FMT_BGR24,
                SWS_POINT, NULL, NULL, NULL);
            video_cap->preview_shm_queue = new Clibvideoqueue(REC_COMMON_VIDEO_EWS_SHM_PREVIEW_QUEUE);
            /* preview is also scaled to RTP size (320x180) and letterboxed
             * into the 320x240 preview frame by ews_cap_on_frame */
            video_cap->preview_sws_ctx = sws_getContext(
                REC_COMMON_VIDEO_SNAPSHOT_WIDTH, REC_COMMON_VIDEO_SNAPSHOT_HEIGHT, PIX_FMT_BGR24,
                REC_COMMON_VIDEO_RTP_EWS_WIDTH, REC_COMMON_VIDEO_RTP_EWS_HEIGHT, PIX_FMT_BGR24,
                SWS_FAST_BILINEAR, NULL, NULL, NULL);
        }
    }
    return video_cap;
}

/* Release swscale contexts and shared-memory queues, then the object itself. */
static void ews_video_capture_destroy(ews_video_capture_t *video_cap)
{
    if (video_cap) {
        if (video_cap->preview_sws_ctx) {
            sws_freeContext(video_cap->preview_sws_ctx);
            video_cap->preview_sws_ctx = NULL;
        }
        if (video_cap->rtp_sws_ctx) {
            sws_freeContext(video_cap->rtp_sws_ctx);
            video_cap->rtp_sws_ctx = NULL;
        }
        if (video_cap->snapshot_shm_queue) {
            delete video_cap->snapshot_shm_queue;
            video_cap->snapshot_shm_queue = NULL;
        }
        if (video_cap->rtp_shm_queue) {
            delete video_cap->rtp_shm_queue;
            video_cap->rtp_shm_queue = NULL;
        }
        if (video_cap->sales_shm_queue) {
            delete video_cap->sales_shm_queue;
            video_cap->sales_shm_queue = NULL;
        }
        if (video_cap->preview_shm_queue) {
            delete video_cap->preview_shm_queue;
            video_cap->preview_shm_queue = NULL;
        }
        free(video_cap);
    }
}

/*
 * Resolve the configured camera, create the capture device at snapshot
 * resolution and start streaming frames into ews_cap_on_frame.
 * Returns 0 on success, -1 / framework error code on failure.
 */
static int ews_video_capture_start(ews_video_capture_t *video_cap)
{
    ews_capture_config_t *conf = &video_cap->parent->config;
    /* BUGFIX: dev_id was uninitialized when camera_type != CAMERA_TYPE_EWS,
     * then passed to videocap_create (UB read). */
    int dev_id = -1;

    if (video_cap->camera_type == CAMERA_TYPE_EWS) {
        CSimpleStringA tmp;
        dev_id = ews_capture_get_video_device_id(conf->strVideo, tmp);
        if (dev_id == -1) {
            Dbg("No ews camera,please check config file or device!");
            return -1;
        }
    }

    videocap_param param = {0};
    int cap_mode;
    int rc = -1;

    rc = calc_capture_mode(REC_COMMON_VIDEO_SNAPSHOT_WIDTH,
                           REC_COMMON_VIDEO_SNAPSHOT_HEIGHT, &cap_mode);
    if (rc != 0) {
        Dbg("calc cap_mode failed!");
        return rc;
    }
    param.cap_mode = cap_mode;
    param.dev_id = dev_id;
    param.frame_fmt = VIDEO_FORMAT_RGB24;
    /* mobile-class devices capture at a reduced frame rate */
    if ((ePadtype == g_eDeviceType) || (eDesk2SType == g_eDeviceType)) {
        param.fps = REC_COMMON_VIDEO_FPS_MOBILE;
    } else {
        param.fps = REC_COMMON_VIDEO_RAW_FPS;
    }
    param.on_frame = &ews_cap_on_frame;
    param.user_data = video_cap;
    param.option = 0;

    /* BUGFIX: the source was garbled to "¶m" (HTML-entity damage of &param). */
    rc = videocap_create(&video_cap->cap, &param);
    if (rc != 0) {
        Dbg("videocap create failed!");
        return rc;
    }
    rc = videocap_start(video_cap->cap);
    if (rc != 0) {
        Dbg("videocap start failed!");
        videocap_destroy(video_cap->cap);
        video_cap->cap = NULL;
        return rc;
    }
    return 0;
}

/* Stop and destroy the underlying capture device, if created. */
static void ews_video_capture_stop(ews_video_capture_t *video_cap)
{
    if (video_cap->cap) {
        videocap_stop(video_cap->cap);
        videocap_destroy(video_cap->cap);
        video_cap->cap = NULL;
    }
}

/*
 * Create the full capture object (audio + EWS video) from config.
 * On success stores the new object in *p_cap and returns 0; on any failure
 * all partial state is destroyed and Error_Unexpect is returned.
 */
int ews_capture_create(const ews_capture_config_t *config, ews_capture_t **p_cap)
{
    ews_capture_t *cap = ZALLOC_T(ews_capture_t);
    if (!cap) { /* BUGFIX: allocation result was used unchecked */
        return Error_Unexpect;
    }
    cap->audio = NULL;
    cap->video = NULL;
    memcpy(&cap->config, config, sizeof(ews_capture_config_t));

    cap->audio = ews_audio_capture_create(cap);
    if (!cap->audio) {
        Dbg("create audio capture object failed!");
        ews_capture_destroy(cap); /* BUGFIX: cap leaked on this path */
        return Error_Unexpect;
    }

    CSimpleStringA tmp;
    int dev_id = ews_capture_get_video_device_id(config->strVideo, tmp);
    if (dev_id == -1) {
        Dbg("ews camera device id error!");
        ews_capture_destroy(cap);
        return Error_Unexpect;
    }

    cap->video = ews_video_capture_create(cap, CAMERA_TYPE_EWS);
    if (!cap->video) {
        Dbg("create ews video object failed!");
        ews_capture_destroy(cap); /* BUGFIX: cap and audio leaked on this path */
        return Error_Unexpect;
    }

    *p_cap = cap;
    /* BUGFIX: %p for the pointer (was %0x, truncates on 64-bit) */
    Dbg("create surveillance record audio capture object(%p) success, capture sample rate is %d, and audio in device name is %s.",
        cap, cap->config.iaudiosamplerate, cap->config.strAudioIn.GetData());
    return 0;
}

/*
 * Camera-restart helper: re-create only the video half of an existing
 * capture object.  Succeeds only when the device is present AND the video
 * object is currently absent.
 */
ErrorCodeEnum ews_capture_create(const ews_capture_config_t *config, ews_capture_t *cap)
{
    CSimpleStringA tmp;
    int dev_id = ews_capture_get_video_device_id(config->strVideo, tmp);

    if ((dev_id != -1) && (cap->video == NULL)) {
        cap->video = ews_video_capture_create(cap, CAMERA_TYPE_EWS);
        if (!cap->video) {
            Dbg("create ews video object failed!");
            return Error_Unexpect;
        }
    } else {
        return Error_Unexpect;
    }
    return Error_Succeed;
}

/* Destroy video then audio halves, then the capture object itself. */
void ews_capture_destroy(ews_capture_t *cap)
{
    if (cap) {
        if (cap->video) {
            ews_video_capture_destroy(cap->video);
            cap->video = NULL;
        }
        if (cap->audio) {
            ews_audio_capture_destroy(cap->audio);
            cap->audio = NULL;
        }
        free(cap);
    }
}

/*
 * Start audio then video capture.  Audio failure aborts before video is
 * touched; video failure logs a device-exception event and returns
 * Error_EwsCamera.
 */
ErrorCodeEnum ews_capture_start(ews_capture_t *cap)
{
    int rc = 0;

    if (cap->audio) {
        rc = ews_audio_capture_start(cap->audio);
        if (rc != Error_Succeed) {
            ErrorCodeEnum rslt = (ErrorCodeEnum)rc;
            if (rslt == Error_AudioIN) {
                Dbg("start audio In object failed! rc:%d", rc);
            }
            return rslt;
        }
    } else {
        Dbg("start ews audio Error_Unexpect");
        return Error_Unexpect;
    }

    if (cap->video) {
        rc = ews_video_capture_start(cap->video);
        if (rc != Error_Succeed) {
            Dbg("start ews video capture object failed! rc:%d", rc);
            CSimpleStringA strCamFriendlyName;
            ews_capture_get_video_device_id(cap->config.strVideo.GetData(), strCamFriendlyName);
            char strMessage[MAX_PATH * 2] = {0};
            get_external_camera_exception_message(strMessage, MAX_PATH * 2, strCamFriendlyName,
                                                  "open ews camera fail,please check device");
            LogError(Severity_Middle, Error_NotInit,
                     ERROR_MOD_SURVEILLANCERECORDER_EWSCAM_OPEN, strMessage);
            return Error_EwsCamera;
        }
    } else {
        Dbg("start ews video Error_Unexpect");
        return Error_Unexpect;
    }
    return (ErrorCodeEnum)rc;
}

/* Stop both capture halves (each tolerates being absent). */
void ews_capture_stop(ews_capture_t *cap)
{
    if (cap->audio) {
        ews_audio_capture_stop(cap->audio);
    }
    if (cap->video) {
        ews_video_capture_stop(cap->video);
    }
}

/*
 * Report the RTP queue length as a camera-health probe.
 * *ews_n = queue length, or -1 when no video object exists.  Returns 0.
 */
int ews_capture_detect_camera_bug(ews_capture_t *cap, int *ews_n)
{
    *ews_n = 0;
    if (cap->video) {
        if (cap->video->rtp_shm_queue) {
            *ews_n = cap->video->rtp_shm_queue->GetVideoLens();
        }
    } else {
        *ews_n = -1;
    }
    return 0;
}

/*
 * Fetch the timestamp of the last frame pushed into the RTP queue
 * (0 when no video object exists).  Returns 0.
 */
int ews_capture_get_last_frametime(ews_capture_t *cap, DWORD *ews_n)
{
    *ews_n = 0;
    if (cap->video) {
        if (cap->video->rtp_shm_queue) {
            *ews_n = cap->video->rtp_shm_queue->GetLastFrameTime();
        }
    } else {
        *ews_n = 0;
    }
    return 0;
}

/* Count PortAudio input and output capable devices.  Returns 0. */
static int ews_audio_get_dev_count(int *in_cnt, int *out_cnt)
{
    int icnt = 0, ocnt = 0;
    int cnt = Pa_GetDeviceCount();

    for (int i = 0; i < cnt; ++i) {
        const PaDeviceInfo *info = Pa_GetDeviceInfo(i);
        if (!info)  /* defensive: Pa_GetDeviceInfo may return NULL */
            continue;
        if (info->maxInputChannels)
            icnt++;
        if (info->maxOutputChannels)
            ocnt++;
    }
    if (in_cnt)
        *in_cnt = icnt;
    if (out_cnt)
        *out_cnt = ocnt;
    return 0;
}

/*
 * Name of the idx-th input-only (or output-only) device, counted the same
 * way as translate_id.  Returns an empty string when idx is out of range.
 */
static CSimpleStringA ews_audio_get_dev_name(bool in_direction, int idx)
{
    int cnt = Pa_GetDeviceCount();
    int ii, i;

    for (i = 0, ii = 0; i < cnt; ++i) {
        const PaDeviceInfo *info = Pa_GetDeviceInfo(i);
        if (!info)  /* defensive: Pa_GetDeviceInfo may return NULL */
            continue;
        if (in_direction) {
            if (info->maxInputChannels) {
                if (idx == ii) {
                    return CSimpleStringA(info->name);
                }
                ii++;
            }
        } else {
            if (info->maxOutputChannels) {
                if (idx == ii) {
                    return CSimpleStringA(info->name);
                }
                ii++;
            }
        }
    }
    return CSimpleStringA();
}

/*
 * Process-wide initialisation: COM, a CRT locale workaround, PortAudio and
 * the video framework; then log every video/audio device found.
 * Returns Error_Succeed or Error_Resource.
 */
int ews_capture_lib_init()
{
    HRESULT hr = CoInitialize(NULL);
    int rc;

    {
        /* Switch the MSVCR100 CRT into the "chs" locale so MBCS device names
         * convert correctly. */
        HMODULE hModule = GetModuleHandleA("MSVCR100.dll");
        if (hModule) {
            typedef char *(*f_setlocale)(int, const char *);
            f_setlocale f = (f_setlocale)GetProcAddress(hModule, "setlocale");
            if (f) { /* BUGFIX: pointer was called without a NULL check */
                f(LC_ALL, "chs");
            }
        }
    }

    if (SUCCEEDED(hr)) {
        PaError Error;
        Error = Pa_Initialize();
        if (Error == paNoError) {
            rc = videoframework_init();
            if (rc != 0) {
                Dbg("videoframework_init failed, rc=%d", rc);
                return Error_Resource;
            }
        } else {
            Dbg("PaInitialize failed, rc=%d", Error);
            return Error_Resource;
        }
    } else {
        Dbg("CoInitialize failed! hr:%d", hr);
        return Error_Resource;
    }

    /* Log every video capture device: "index = name;md5(path)". */
    {
        int i, n;
        n = videocap_get_device_count();
        for (i = 0; i < n; ++i) {
            WCHAR tmp[256];
            char t[256];
            WCHAR tmp1[256];
            char t1[256];
            videocap_get_device_name(i, tmp, ARRAYSIZE(tmp));
            WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL);
            videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1));
            WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL);
            {
                unsigned char x[MD5_DIGESTSIZE];
                md5_ctx_t ctx;
                md5_init(&ctx); /* NOTE(review): ctx seems unused -- md5() looks
                                 * one-shot; kept in case md5_init has side effects */
                md5(x, t1, strlen(t1));
                Bin2Str(x, sizeof(x), t1, sizeof(t1));
            }
            Dbg("%d = %s;%s", i, t, t1);
        }
    }

    /* Log every audio device, by direction. */
    {
        int icnt, ocnt;
        rc = ews_audio_get_dev_count(&icnt, &ocnt);
        if (rc == 0) {
            int i;
            Dbg("audio input devices(%d):", icnt);
            for (i = 0; i < icnt; ++i) {
                CSimpleStringA str = ews_audio_get_dev_name(true, i);
                Dbg("%d = %s", i, (LPCSTR)str);
            }
            Dbg("audio output devices(%d):", ocnt);
            for (i = 0; i < ocnt; ++i) {
                CSimpleStringA str = ews_audio_get_dev_name(false, i);
                Dbg("%d = %s", i, (LPCSTR)str);
            }
        }
    }
    return Error_Succeed;
}

/* Tear down in reverse order of ews_capture_lib_init. */
void ews_capture_lib_term()
{
    videoframework_term();
    Pa_Terminate();
    CoUninitialize();
}

/*
 * Find an audio device whose name contains dev_name; the returned index is
 * counted over input-only (or output-only) devices, matching translate_id.
 * Returns -1 when not found.
 */
int ews_capture_get_audio_device_id(bool in_direction, const char *dev_name)
{
    int cnt = Pa_GetDeviceCount();
    int ii, i;

    for (i = 0, ii = 0; i < cnt; ++i) {
        const PaDeviceInfo *info = Pa_GetDeviceInfo(i);
        if (!info)  /* defensive: Pa_GetDeviceInfo may return NULL */
            continue;
        if (in_direction) {
            if (info->maxInputChannels) {
                if (strstr(info->name, dev_name) != NULL) {
                    return ii;
                }
                ii++;
            }
        } else {
            if (info->maxOutputChannels) {
                if (strstr(info->name, dev_name) != NULL) {
                    return ii;
                }
                ii++;
            }
        }
    }
    return -1;
}

/*
 * Find the video capture device whose normalized DirectShow device path
 * (upper-cased, '#' -> '\\') contains dev_inst_path.  On success sets
 * ews_name to the friendly device name and returns the device index;
 * returns -1 when not found.  Non-matching devices are logged.
 */
int ews_capture_get_video_device_id(const char *dev_inst_path, CSimpleStringA &ews_name)
{
    int i, n;

    n = videocap_get_device_count();
    for (i = 0; i < n; ++i) {
        WCHAR tmp1[256];
        char t1[256];
        videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1));
        WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL);

        /* normalize so the path matches a device-instance path */
        size_t len = strlen(t1); /* hoisted: was re-evaluated each iteration */
        for (size_t j = 0; j < len; ++j) {
            /* cast: toupper on a plain (possibly negative) char is UB */
            t1[j] = (char)toupper((unsigned char)t1[j]);
            if (t1[j] == '#')
                t1[j] = '\\';
        }

        if (strstr(t1, dev_inst_path) != NULL) {
            WCHAR tmp[256] = {0};
            char t[256] = {0};
            videocap_get_device_name(i, tmp, ARRAYSIZE(tmp));
            WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL);
            ews_name = t;
            Dbg("matched ews camera device: [%d] %s.", i, t1);
            return i;
        } else {
            WCHAR tmp[256] = {0};
            char t[256] = {0};
            videocap_get_device_name(i, tmp, ARRAYSIZE(tmp));
            WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL);
            Dbg("device list: [%d] %s.", i, t1);
        }
    }
    return -1; /* not found */
}

/*
 * Check that the device at dev_inst_path still has the recorded identity
 * dev_name (format "friendly name;md5(path)").  Returns true only when the
 * instance path matches AND the name+hash string is identical.
 */
bool ews_capture_check_video_device_match(const char *dev_name, const char *dev_inst_path)
{
    int i, n;

    n = videocap_get_device_count();
    for (i = 0; i < n; ++i) {
        WCHAR tmp[256];
        char t[256];
        WCHAR tmp1[256];
        char t1[256];
        videocap_get_device_name(i, tmp, ARRAYSIZE(tmp));
        WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL);
        videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1));
        WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL);

        /* normalized copy of the device path for the instance-path match */
        char t2[256];
        strcpy(t2, t1);
        size_t len = strlen(t2); /* hoisted: was re-evaluated each iteration */
        for (size_t j = 0; j < len; ++j) {
            /* cast: toupper on a plain (possibly negative) char is UB */
            t2[j] = (char)toupper((unsigned char)t2[j]);
            if (t2[j] == '#')
                t2[j] = '\\';
        }

        /* build "name;md5hex(path)" in t for the identity comparison */
        {
            unsigned char x[MD5_DIGESTSIZE];
            md5_ctx_t ctx;
            md5_init(&ctx);
            md5(x, t1, strlen(t1));
            Bin2Str(x, sizeof(x), t1, sizeof(t1));
        }
        strcat(t, ";");
        strcat(t, t1);

        if (strstr(t2, dev_inst_path) != NULL) {
            Dbg("[dbg] %s found in %d cameras.", dev_inst_path, n);
            if (strcmp(dev_name, t) == 0)
                return true;
        }
    }
    return false; /* not matched */
}

/*
 * Adjust camera brightness unless the camera is already flagged failed.
 * Returns true on success (including the no-op case).
 */
bool ews_capture_adj_brightness(ews_capture_t *cap, int nvalue, ErrorCodeEnum nCode)
{
    HRESULT rst = S_OK;

    if (cap->video && (nCode != Error_EwsCamera) && (nCode != Error_AllCamera)) {
        rst = videocap_adj_brightness(cap->video->cap, nvalue);
    }
    return SUCCEEDED(rst) ? true : false;
}

/*
 * Switch the camera to automatic brightness unless it is flagged failed.
 * Returns true on success (including the no-op case).
 */
bool ews_capture_set_autobrightness(ews_capture_t *cap, ErrorCodeEnum nCode)
{
    HRESULT rst = S_OK;

    if (cap->video && (nCode != Error_EwsCamera) && (nCode != Error_AllCamera)) {
        rst = videocap_set_autobrightness(cap->video->cap);
    }
    return SUCCEEDED(rst) ? true : false;
}

/*
 * Read the current camera brightness; -1 when the video object is absent,
 * the camera is flagged failed, or the query fails.
 */
int ews_capture_get_brightness(ews_capture_t *cap, ErrorCodeEnum nCode)
{
    int nValue = 0;
    HRESULT rst = S_OK;

    if (cap->video && (nCode != Error_EwsCamera) && (nCode != Error_AllCamera)) {
        /* BUGFIX: the original declared a second, shadowing 'rst' here, so
         * SUCCEEDED() below always tested the untouched S_OK and a stale
         * nValue could be returned on failure. */
        rst = videocap_get_brightness(cap->video->cap, &nValue);
    } else {
        return -1;
    }
    return SUCCEEDED(rst) ? nValue : -1;
}

/*
 * Stop and destroy only the video half (used together with the camera-restart
 * overload of ews_capture_create).  Returns 0, or -1 when already absent.
 */
int ews_stop_camera(ews_capture_t *cap)
{
    if (cap->video) {
        ews_video_capture_stop(cap->video);
        ews_video_capture_destroy(cap->video);
        cap->video = NULL;
        return 0;
    }
    return -1;
}

/*
 * Build "[camera-name] message" into pBuffer, truncating the stored device
 * name at its first ';' (the name;hash identity format).  Falls back to the
 * bare message when no name fits.  Returns the sprintf_s result (chars
 * written) or 0.
 */
int get_external_camera_exception_message(char *pBuffer, size_t uLen,
                                          CSimpleStringA strDeviceName,
                                          const char *strErrorMessage)
{
    int iRet = 0;

    if (strDeviceName.GetLength() > 0) {
        const char *strCameraName = strDeviceName.GetData();
        char strBuffer[MAX_PATH] = {0};
        if (sprintf_s(strBuffer, MAX_PATH, "%s", strCameraName) > 0) {
            char *pIndex = NULL;
            if (pIndex = (char *)strstr(strBuffer, ";")) {
                *pIndex = '\0'; /* keep only the friendly-name part */
            }
        }
        if (NULL != strErrorMessage) {
            size_t uDataLen = strlen(strBuffer);
            size_t uErrorLen = strlen(strErrorMessage);
            if (uLen > uDataLen + uErrorLen + 10) {
                iRet = sprintf_s(pBuffer, uLen, "[%s] %s", strBuffer, strErrorMessage);
            }
        }
    }
    if (0 == iRet) {
        if (NULL != strErrorMessage) {
            iRet = sprintf_s(pBuffer, uLen, "%s", strErrorMessage);
        }
    }
    return iRet;
}