// ---------------------------------------------------------------------------
// Platform includes.
// NOTE(review): several '#include' directives below lost their header names
// (angle-bracket content was stripped when this file was extracted). They are
// left as-is rather than guessed.
// ---------------------------------------------------------------------------
#ifdef _WIN32
#include "StdAfx.h"
#include "io.h"
// NOTE(review): three nameless includes — presumably <windows.h>, <process.h>
// (_beginthreadex) and <mmsystem.h> (timeGetTime / winmm.lib) — TODO confirm.
#include
#include
#include
#else
// NOTE(review): four nameless includes — presumably <dirent.h>, <unistd.h>,
// <semaphore.h> and <pthread.h>, given the APIs used below — TODO confirm.
#include
#include
#include
#include
#endif
#include "libvideorecord_impl.h"
#include "videoutil.h"
// NOTE(review): one more nameless include — possibly <string> or <time.h>.
#include
#ifdef _WIN32
#pragma comment(lib, "winmm.lib")
#pragma comment(lib, "legacy_stdio_definitions.lib")
#endif

// Delete-and-null helper used for every heap-allocated member in this file.
// NOTE(review): this macro uses plain 'delete', but several call sites pass
// buffers allocated with new[] (e.g. 'new char[n]'), which strictly requires
// delete[] — undefined behavior; flagged here, not changed.
#ifndef RECORD_SAFE_DELETE
#define RECORD_SAFE_DELETE(p) \
do{ \
if(p){ \
delete p; \
p=NULL;\
} \
}\
while(0)
#endif

#ifndef _WIN32
// Non-Windows replacement for the Win32 timeGetTime(): milliseconds of
// monotonic uptime, truncated into an unsigned int (wraps like the original).
unsigned int timeGetTime()
{
    unsigned int uptime = 0;
    struct timespec on;
    if (clock_gettime(CLOCK_MONOTONIC, &on) == 0)
        uptime = on.tv_sec * 1000 + on.tv_nsec / 1000000;
    return uptime;
}
#endif

#ifdef _WIN32
// Find the first file under sFindPath whose name contains sFindFileName
// (wildcard pattern "<path>\*<name>*"). On success stores the bare file name
// (no directory) in sMatchedFile and returns true.
static bool FindMatchedFile(const char* sFindPath, const char* sFindFileName, std::string& sMatchedFile)
{
    char sPath[MAX_PATH] = { 0 };                // unused — kept as in original
    char sFormatFileName[MAX_PATH + 2] = "*";    // initializer is overwritten by lstrcpy below
    WIN32_FIND_DATA FindFileData;
    HANDLE hFind;
    lstrcpy(sFormatFileName, sFindPath);
    // Ensure exactly one separator before the wildcard.
    if (sFindPath[strlen(sFindPath) - 1] != '\\') {
        lstrcat(sFormatFileName, "\\*");
    }
    else {
        lstrcat(sFormatFileName, "*");
    }
    lstrcat(sFormatFileName, sFindFileName);
    lstrcat(sFormatFileName, "*");
    // NOTE(review): the concatenations above are unbounded — a long
    // sFindPath/sFindFileName can overflow sFormatFileName (MAX_PATH + 2).
    hFind = FindFirstFile(sFormatFileName, &FindFileData);
    if (hFind == INVALID_HANDLE_VALUE) {
        sMatchedFile = "";
        return false;
    }
    else {
        sMatchedFile = FindFileData.cFileName;
        FindClose(hFind);
    }
    return true;
}
#else
// Recursive variant: walks sFindPath depth-first and returns (in sMatchedFile)
// the FULL path of the first regular entry whose name contains sFindFileName.
// Note the asymmetry with the Win32 version, which returns only the file name.
static bool FindMatchedFile(const char* sFindPath, const char* sFindFileName, std::string& sMatchedFile)
{
    DIR* pDir = NULL;
    struct dirent* pFile = NULL;
    bool result = false;
    pDir = opendir(sFindPath);
    if (pDir == NULL) {
        return result;
    }
    while ((pFile = readdir(pDir)) != NULL) {
        // NOTE(review): 'd_type & DT_DIR' is a bitwise test on an enumeration
        // of distinct values — it also matches DT_BLK (6), and d_type may be
        // DT_UNKNOWN on some filesystems. '== DT_DIR' (with a stat() fallback)
        // is almost certainly what was intended — TODO confirm and fix.
        if (pFile->d_type & DT_DIR) {
            //m_pHostApi->Debug("FindMatchedFile DIR name: %s.", pFile->d_name);
            if (strcmp(pFile->d_name, ".") == 0 || strcmp(pFile->d_name, "..") == 0)
                continue;
            char Path[256] = { 0 };
            int len = strlen(sFindPath);
            strncpy(Path, sFindPath, len + 1);
            if (sFindPath[len - 1] != '/')
                strncat(Path, "/", 2);
            strncat(Path, pFile->d_name, strlen(pFile->d_name) + 1);
            // NOTE(review): the strncpy/strncat bounds are derived from the
            // SOURCE lengths, not from sizeof(Path) — paths longer than 256
            // bytes overflow Path.
            //m_pHostApi->Debug("FindMatchedFile Path: %s.", Path);
            result = FindMatchedFile(Path, sFindFileName, sMatchedFile);  // recurse into subdirectory
            if (result) {
                break;
            }
        }
        else {
            //m_pHostApi->Debug("FindMatchedFile FILE name: %s.", pFile->d_name);
            if (strstr(pFile->d_name, sFindFileName) != NULL) {
                char Path[256] = {0};
                int len = strlen(sFindPath);
                strncpy(Path, sFindPath, len + 1);
                if (sFindPath[len - 1] != '/')
                    strncat(Path, "/", 2);
                strncat(Path, pFile->d_name, strlen(pFile->d_name) + 1);
                //m_pHostApi->Debug("FindMatchedFile Finded: %s.", Path);
                sMatchedFile = Path;
                result = true;
                break;
            }
        }
    }
    closedir(pDir);
    return result;
}
#endif

// Rename 'file' to 'newfilename' if it exists; true only when both the
// existence check and rename() succeed.
static bool ReNameFile(const char* file, const char* newfilename)
{
#ifdef _WIN32
    if (!_access(file, 0))
#else
    if (!access(file, F_OK))
#endif //_WIN32
    {
        if (!rename(file, newfilename)){
            return true;
        }
        else{
            return false;
        }
    }
    else{
        return false;
    }
}

// Interleave two mono PCM streams into one stereo stream, in place inside
// pLocalAudios: eLocalLeft puts the local stream on the left channel and the
// remote stream on the right, otherwise the channels are swapped.
// Returns the number of bytes written (2 * uLocalAudioLen on success).
// NOTE(review): the uint32_t*/uint16_t* arithmetic assumes uBitDeepth == 2
// (16-bit samples), and the loop writes 2 * uLocalAudioLen bytes into
// pLocalAudios — callers must guarantee uBufferLen >= 2 * uLocalAudioLen and
// that pRemoteAudios holds at least uLocalAudioLen bytes; none of this is
// checked here — TODO confirm against call sites.
static uint32_t BindPCMAudioData(uint32_t uBufferLen, char* pLocalAudios, uint32_t uLocalAudioLen, char* pRemoteAudios, uint32_t uRemoteAudioLen, uint32_t uBitDeepth, eStereoArrayType eType)
{
    uint32_t uRet = 0;
    if (0 == uLocalAudioLen || 0 == uBitDeepth || NULL == pLocalAudios || NULL == pRemoteAudios) {
        return uRet;
    }
    // Snapshot the local samples before the destination buffer is cleared.
    char* pBuffer = new char[uLocalAudioLen];
    memset(pBuffer, 0, uLocalAudioLen);
    if (NULL != pLocalAudios) {  // always true here (checked above) — kept as in original
        memcpy(pBuffer, pLocalAudios, uLocalAudioLen);
        memset(pLocalAudios, 0, uBufferLen);
    }
    for (uint32_t i = 0; i < uLocalAudioLen / uBitDeepth; i++) {
        if (eLocalLeft == eType) {
            // sample i: left = local, right = remote
            memcpy((uint32_t*)pLocalAudios + i, ((uint16_t*)(pBuffer)) + i, uBitDeepth);
            uint16_t* pindex = (uint16_t*)((uint32_t*)pLocalAudios + i) + 1;
            memcpy(pindex, ((uint16_t*)(pRemoteAudios)) + i, uBitDeepth);
            uRet += (2 * uBitDeepth);
        }
        else {
            // sample i: left = remote, right = local
            memcpy((uint32_t*)pLocalAudios + i, ((uint16_t*)(pRemoteAudios)) + i, uBitDeepth);
            uint16_t* pindex = (uint16_t*)((uint32_t*)pLocalAudios + i) + 1;
            memcpy(pindex, ((uint16_t*)(pBuffer)) + i, uBitDeepth);
            uRet += (2 * uBitDeepth);
        }
    }
    RECORD_SAFE_DELETE(pBuffer);  // NOTE(review): new[] buffer freed with delete (see macro note)
    return uRet;
}

// Expand a mono PCM buffer to stereo in place by duplicating every sample
// into both channels. Returns the number of bytes written (2 * uAudioLen).
// NOTE(review): same uBitDeepth == 2 assumption and the same requirement that
// uBufferLen >= 2 * uAudioLen as BindPCMAudioData; also 'int i' is compared
// against an unsigned quotient (signed/unsigned mismatch).
static uint32_t ConstructStereoAudioData(uint32_t uBufferLen, char* pAudiosBuffer, uint32_t uAudioLen, uint32_t uBitDeepth)
{
    uint32_t uRet = 0;
    char* pBuffer = new char[uAudioLen];
    memset(pBuffer, 0, uAudioLen);
    if (NULL != pAudiosBuffer) {
        memcpy(pBuffer, pAudiosBuffer, uAudioLen);
        memset(pAudiosBuffer, 0, uBufferLen);
    }
    for (int i = 0; i < uAudioLen / uBitDeepth; i++) {
        // write the same source sample into both halves of the i-th stereo pair
        memcpy((uint32_t*)pAudiosBuffer + i, ((uint16_t*)(pBuffer)) + i, uBitDeepth);
        uint16_t* pindex = (uint16_t*)((uint32_t*)pAudiosBuffer + i) + 1;
        memcpy(pindex, ((uint16_t*)(pBuffer)) + i, uBitDeepth);
        uRet += (2 * uBitDeepth);
    }
    RECORD_SAFE_DELETE(pBuffer);
    return uRet;
}

// Output bit-rate (bits/s) for 8 kHz input, chosen by channel count and
// quality tier. Values are empirical per-product constants.
static uint32_t Get8KOutPutBitRate(int iChannels, eAudioOutPutType eType)
{
    uint32_t uOutBitRate = 16 * 8;
    if (1 == iChannels) {
        if (eUltraHD == eType) {
            uOutBitRate = 1000 * 8;
        }
        else if (eHighDefinition == eType) {
            uOutBitRate = 625 * 8;
        }
        else if (eStandardDefinition == eType) {
            uOutBitRate = 16 * 8;
        }
        else {
            uOutBitRate = 128 * 1000;
        }
    }
    else {
        if (eStandardDefinition == eType) {
            uOutBitRate = 16 * 8;
        }
        else if (eLowDefinition == eType) {
            uOutBitRate = iChannels * 128 * 1000;
        }
        else {
            uOutBitRate = 1500 * 8;
        }
    }
    return uOutBitRate;
}

// Output bit-rate (bits/s) for 16 kHz input.
static uint32_t Get16KOutPutBitRate(int iChannels, eAudioOutPutType eType)
{
    uint32_t uOutBitRate = 1000 * 8;
    if (1 == iChannels) {
        if (eUltraHD == eType) {
            uOutBitRate = 2000 * 8;
        }
        else if (eHighDefinition == eType) {
            uOutBitRate = 1500 * 8;
        }
        else if (eStandardDefinition == eType) {
            uOutBitRate = 1000 * 8;
        }
        else {
            uOutBitRate = 1000 * 256;
        }
    }
    else {
        if (eStandardDefinition == eType) {
            uOutBitRate = 2000 * 8;
        }
        else {
            uOutBitRate = 2500 * 8;
        }
    }
    return uOutBitRate;
}

// Output bit-rate (bits/s) for 32 kHz input.
static uint32_t Get32KOutPutBitRate(int iChannels, eAudioOutPutType eType)
{
    uint32_t uOutBitRate = 1000 * 8;
    if (1 == iChannels) {
        if (eUltraHD == eType) {
            uOutBitRate = 2500 * 8;
        }
        else {
            uOutBitRate = 1000 * 8;
        }
    }
    else {
        if (eStandardDefinition == eType) {
            uOutBitRate = 3000 * 8;
        }
        else {
            uOutBitRate = 6000 * 8;
        }
    }
    return uOutBitRate;
}
static uint32_t Get44KOutPutBitRate(int iChannels, eAudioOutPutType eType) { uint32_t uOutBitRate = 2000 * 8; if (1 == iChannels) { if (eUltraHD == eType) { uOutBitRate = 6000 * 8; } else if (eHighDefinition == eType) { uOutBitRate = 4000 * 8; } else { uOutBitRate = 2000 * 8; } } else { if (eStandardDefinition == eType) { uOutBitRate = 10000 * 8; } else if (eHighDefinition == eType) { uOutBitRate = 20000 * 8; } else { uOutBitRate = 40000 * 8; } } return uOutBitRate; } static uint32_t Get48KOutPutBitRate(int iChannels, eAudioOutPutType eType) { uint32_t uOutBitRate = 24000 * 8; if (eUltraHD == eType) { uOutBitRate = 24000 * 8; } else if (eHighDefinition == eType) { uOutBitRate = 20000 * 8; } else if (eStandardDefinition == eType) { uOutBitRate = 12000 * 8; } else { uOutBitRate = 8000 * 8; } return uOutBitRate; } static uint32_t GetAudioOutPutBitRate(int iInPutSamperate, int iChannels, eAudioOutPutType eType) { uint32_t uOutBitRate = 8000; switch (iInPutSamperate) { case 8000: uOutBitRate = Get8KOutPutBitRate(iChannels, eType); break; case 16000: uOutBitRate = Get16KOutPutBitRate(iChannels, eType); break; case 32000: uOutBitRate = Get32KOutPutBitRate(iChannels, eType); break; case 44100: uOutBitRate = Get44KOutPutBitRate(iChannels, eType); break; case 48000: uOutBitRate = Get48KOutPutBitRate(iChannels, eType); break; default: break; } return uOutBitRate; } static uint32_t ConvertStereo2Mono(char* pDstBuf, const uint32_t uDstLen, char* pSrcBuf, uint32_t uSrcLen, uint32_t uBitDeepth) { uint32_t uRet = 0; uint32_t uOneChannelLen = uSrcLen / 2; uint32_t i = 0; for (; i < uOneChannelLen / 2 && i < uDstLen / uBitDeepth; i++) { memcpy((uint16_t*)pDstBuf + i, ((uint32_t*)(pSrcBuf)) + i, uBitDeepth); } if (i == uOneChannelLen / 2) { uRet = uOneChannelLen; } return uRet; } static void __recordlog(void* user_data, const char* fmt, va_list arg) { libvideorecord_impl* pRecord = (libvideorecord_impl*)user_data; if (NULL != pRecord) { CHostApi* pHost = pRecord->GetHostApi(); if 
(NULL != pHost) { pHost->vDebug(RECORD_LOG_DEBUG, fmt, arg); } } } //视频录制线程 #ifdef _WIN32 static unsigned int __stdcall VideoRecordThread(void* pParam) { libvideorecord_impl* Record = (libvideorecord_impl*)pParam; int iRet = -1; iRet = Record->VideoRecord(); return iRet; } #else static void* VideoRecordThread(void* pParam) { libvideorecord_impl* Record = (libvideorecord_impl*)pParam; int iRet = -1; iRet = Record->VideoRecord(); return &iRet; } #endif libvideorecord_impl::libvideorecord_impl(bool* pResult, CHostApi* pHostAPI, const char* audioqueuename, const char* videoqueuename, const char* videoqueue2name, const char* salesaudioqueuename, const char* remotevideoqueuename, const char* remoteaudioqueuename) { m_bResult = pResult; m_pHostApi = pHostAPI; m_bStopRecord = false; m_bCloseVideo = false; m_eRecordType = eSingleSide; m_bIsAudioNsOn = false; m_iNsPolicy = 2; m_bIsAudioTransOn = false; m_bPauseRecord = false; m_eFormat = eMP4; m_pText = NULL; InitRecordParams(); m_bReNameVideo = false; m_nFps = 0; m_bWholeSection = false; m_bSessionManage = false; m_videoframe = new videoq_frame; memset(m_videoframe, 0, sizeof(videoq_frame)); m_audioframe = new audio_frame; memset(m_audioframe, 0, sizeof(audio_frame)); m_nRecordthreadId = 0; #ifdef _WIN32 m_hRecordThread = NULL; m_hEventWait = ::CreateEventA(NULL, true, 0, 0); if (!m_hEventWait) { *m_bResult = false; m_pHostApi->Debug(RECORD_LOG_ERROR, "create hEventWait failed!"); return; } #else sem_init(&m_semt, 0, 0); #endif InitMediaQueueInfos(audioqueuename, videoqueuename, videoqueue2name, salesaudioqueuename, remotevideoqueuename, remoteaudioqueuename); memset(m_FileName, 0, MAX_PATH); memset(m_VideoFileName, 0, MAX_PATH); memset(m_PathName, 0, MAX_PATH); memset(m_VideoFomat, 0, MAX_PATH); m_pFFmpegWriter = NULL; m_pAudioNsObj = NULL; m_bMuteAudio = false; *pResult = true; } libvideorecord_impl::~libvideorecord_impl() { RECORD_SAFE_DELETE(m_videoframe->data); RECORD_SAFE_DELETE(m_videoframe); 
RECORD_SAFE_DELETE(m_audioframe); #ifdef _WIN32 if (m_hRecordThread) { m_hRecordThread = NULL; } #endif RECORD_SAFE_DELETE(m_pFFmpegWriter); RECORD_SAFE_DELETE(m_local_audioqueue); RECORD_SAFE_DELETE(m_sales_audioqueue); m_audioqueue = NULL; RECORD_SAFE_DELETE(m_remote_audioqueue); RECORD_SAFE_DELETE(m_env_videoqueue); RECORD_SAFE_DELETE(m_opt_videoqueue); RECORD_SAFE_DELETE(m_remote_videoqueue); if (NULL != m_pAudioNsObj) { DestroyIAudioNsObj(m_pAudioNsObj); m_pAudioNsObj = NULL; } #ifdef _WIN32 if(NULL != m_hEventWait){ CloseHandle(m_hEventWait); m_hEventWait = NULL; } #else sem_destroy(&m_semt); #endif } CHostApi* libvideorecord_impl::GetHostApi() { return m_pHostApi; } bool libvideorecord_impl::InitMediaQueueInfos(const char* audioqueuename, const char* videoqueuename, const char* videoqueue2name, const char* salesaudioqueuename, const char* remotevideoqueuename, const char* remoteaudioqueuename) { m_audioqueue = NULL; m_env_videoqueue = NULL; m_opt_videoqueue = NULL; m_remote_videoqueue = NULL; m_remote_audioqueue = NULL; m_local_audioqueue = NULL; m_sales_audioqueue = NULL; memset(m_audioqueuename, 0, MAX_PATH); if (audioqueuename) { rvc_snprintf(m_audioqueuename, MAX_PATH, "%s", audioqueuename); } memset(m_env_videoqueuename, 0, MAX_PATH); if (videoqueuename) { rvc_snprintf(m_env_videoqueuename, MAX_PATH, "%s", videoqueuename); } memset(m_opt_videoqueuename, 0, MAX_PATH); if (videoqueue2name) { rvc_snprintf(m_opt_videoqueuename, MAX_PATH, "%s", videoqueue2name); } memset(m_salesaudioqueuename, 0, MAX_PATH); if (salesaudioqueuename) { rvc_snprintf(m_salesaudioqueuename, MAX_PATH, "%s", salesaudioqueuename); } memset(m_remotevideoqueuename, 0, MAX_PATH); if (remotevideoqueuename) { rvc_snprintf(m_remotevideoqueuename, MAX_PATH, "%s", remotevideoqueuename); } memset(m_remoteaudioqueuename, 0, MAX_PATH); if (remoteaudioqueuename) { rvc_snprintf(m_remoteaudioqueuename, MAX_PATH, "%s", remoteaudioqueuename); } return true; } bool libvideorecord_impl::EndRecord() { 
if (!m_pFFmpegWriter->StopWrite()) { return false; } RECORD_SAFE_DELETE(m_pFFmpegWriter); RECORD_SAFE_DELETE(m_videoframe->data); RECORD_SAFE_DELETE(m_pRecordAudioBuffer); RECORD_SAFE_DELETE(m_pText); RECORD_SAFE_DELETE(m_pRemoteAudioBuffer); #ifdef _WIN32 #else int ivalue = -1; do { sem_getvalue(&m_semt, &ivalue); if (ivalue > 0) { sem_wait(&m_semt); } } while (ivalue > 0); #endif return true; } bool libvideorecord_impl::Rvc_Timeout(int ms) { bool bTimeout = true; #ifdef _WIN32 DWORD dwRet = WaitForSingleObject(m_hEventWait, ms); if (WAIT_TIMEOUT == dwRet) { } else if (WAIT_OBJECT_0 == dwRet) { bTimeout = false; } #else struct timespec ts; clock_gettime(CLOCK_REALTIME, &ts); long unsec = ts.tv_nsec + (1000 * 1000 * ms); ts.tv_sec += (unsec / 1000000000); ts.tv_nsec = (unsec % 1000000000); int itimeout = sem_timedwait(&m_semt, &ts); if (0 != itimeout && (ETIMEDOUT == errno)) { } else if(0 == itimeout){ bTimeout = false; } #endif return bTimeout; } //开始录制视频 bool libvideorecord_impl::StartRecord() { bool bRet = false; #ifdef _WIN32 ResetEvent(m_hEventWait); #endif m_pFFmpegWriter = new FFmpegWriter(this); if (m_bIsAudioNsOn) { audions_callback_t t_callback = { 0 }; t_callback.debug = &__recordlog; t_callback.user_data = this; m_pAudioNsObj = CreateIAudioNsObj(&t_callback); } #ifdef _WIN32 m_hRecordThread = (HANDLE)_beginthreadex(NULL, 0, VideoRecordThread, (LPVOID)this, 0, (unsigned int*)&m_nRecordthreadId); bRet = true; #else if (0 == pthread_create(&m_nRecordthreadId, NULL, VideoRecordThread, (void*)this)) { bRet = true; //m_pHostApi->Debug(RECORD_LOG_INFO, "create video record thread and thread id is %u.", m_nRecordthreadId); } else { m_pHostApi->Debug(RECORD_LOG_INFO, "create video record thread failed."); } #endif return bRet; } //获取指定队列的SIZE int libvideorecord_impl::GetVideoFrameSize(int& nWidth, int& nHeight, Clibvideoqueue* queue) { int size = queue->GetFrameSize(nWidth, nHeight); return size; } int libvideorecord_impl::VideoRecord() { bool bResult = false; 
bool bInitRecordParam = false; int nRecordStartTime = 0; //本段录像开始时间 int iGetAudioFailedTimes = 0; InitRecordParams(); if (!InitCvxText()) { m_pHostApi->Debug(RECORD_LOG_INFO, "init cvxtext failed."); } GetVideoFullName(); while (!m_bStopRecord){ if (m_bPauseRecord) { Rvc_Timeout(1000); continue; } if (Rvc_Timeout(5)) { if (false == bInitRecordParam) { if (!InitVideoRecordParams()) { //参数初始化 break; } if (!StartRecordWrite()) { //开始录像 LogFailedEvent(eBeginFailed, "开始录像失败"); break; } bInitRecordParam = true; //当第一次记录时删除当前音频只剩下1帧,使音视频数据能够同步 if (m_audioqueue) { m_audioqueue->ClearAudioQueue(); } nRecordStartTime = timeGetTime();//本段录像开始时间 } //控制音视频的同步,精确控制录制频率,确保暂停足够时间 m_iRecordedTime = timeGetTime() - nRecordStartTime - m_iSubTitleTime; MediaSynchronous(); //计算经过的时间,计算已经经过几帧时间 m_iRecordedTime = timeGetTime() - nRecordStartTime - m_iSubTitleTime; int nVideoNum = (int)(((double)m_iRecordedTime / 1000.0) * (double)m_nFps); //如果是第1帧,或者又经过了1帧的时间,则录制视频,对视频进行压缩 if (((nVideoNum == 0) && (m_iRecordVideoNum == 0)) || (nVideoNum > m_iRecordVideoNum)) { int iRet = WriteVideoFrame(); if (-1 == iRet) { break; } } if (false == GetRecordAudioFrame()) { if (++iGetAudioFailedTimes > 200) { LogFailedEvent(eLocalAudioGetFailed, "连续5s获取本地音频失败"); break; } } else { iGetAudioFailedTimes = 0; } //录制音频,计算经过的时间,计算已经经过几帧时间 m_iRecordedTime = timeGetTime() - nRecordStartTime - m_iSubTitleTime; int nAudioNum = (int)((double)m_iRecordedTime / 1000.0); if ((nAudioNum > m_iRecordAudioNum) && (m_iAudioBufferLen >= m_iAudioPerSecBufLen)) { int iRet = WriteAudioFrame(); if (-1 == iRet) { break; } } //计算文件大小,如果文件大小大于10分钟则建立新文件记录 m_iRecordedTime = timeGetTime() - nRecordStartTime - m_iSubTitleTime; if (!m_bWholeSection) { if ((m_iRecordedTime > REC_MAX_FILE) || ((m_bCloseVideo || m_bReNameVideo) && m_bSessionManage)) { EndRecordWrite(); bInitRecordParam = false; nRecordStartTime = timeGetTime(); //本段录像开始时间 ReSetRecordParams(); ReNameVideo(); } } if (m_bPauseRecord) { m_pHostApi->Debug(RECORD_LOG_DEBUG, 
"reset media queue."); ResetMediaQueue(); } } else{ if (eStand2Agent == m_eRecordType) { HandleLeftAudioData(); } m_pHostApi->Debug(RECORD_LOG_INFO, "stop video record.."); break; } } if (bInitRecordParam) { m_pHostApi->OnRecordFinished(); EndRecord(); m_pHostApi->OnASectionFinished(m_VideoFileName, m_iFileSerialNum, true); } else { m_pHostApi->Debug(RECORD_LOG_INFO, "not Init Record Param, Exit."); } return 0; } int libvideorecord_impl::HandleLeftAudioData() { int nAudioLens = m_audioqueue->GetAudioLens(); int nRemoteAudioLens = m_remote_audioqueue->GetAudioLens(); int iRet = -1; while (nAudioLens > 0){ GetRecordAudioFrame(); if (m_iAudioBufferLen >= m_iAudioPerSecBufLen){ iRet = WriteAudioFrame(); if (-1 == iRet) { break; } } nAudioLens = m_audioqueue->GetAudioLens(); nRemoteAudioLens = m_remote_audioqueue->GetAudioLens(); m_pHostApi->Debug(RECORD_LOG_DEBUG, "HandleLeftAudioData local audio length is %d, and remote audio length is %d.", nAudioLens, nRemoteAudioLens); } if (m_iAudioBufferLen > 0) { iRet = WriteAudioFrame(); } return iRet; } int libvideorecord_impl::GetDestTypeVideoFrameSize(int& iWidth, int& iHeight, eRvcRecordType eRecordType) { int iSize = 0; if (eSingleSide == eRecordType) { iSize = GetSingleSideVideoFrameSize(iWidth, iHeight); } else { iSize = GetDoubleSideVideoFrameSize(iWidth, iHeight); } return iSize; } bool libvideorecord_impl::GetRecordVideoFrameSize() { bool bRet = false; m_pHostApi->Debug(RECORD_LOG_DEBUG, "get video framesize, and record type is %s.", record_type_table[m_eRecordType]); for (int i = 0; i < RECORD_FAILED_MAX_TIMES; i++) { m_iVideoFrameSize = GetDestTypeVideoFrameSize(m_iWidth, m_iHeight, m_eRecordType); if (m_iVideoFrameSize <= 0) { if (Rvc_Timeout(50)) { if (0 != i && 0 == i % (RECORD_FAILED_MAX_TIMES / 10)) { m_pHostApi->Debug(RECORD_LOG_ERROR, "GetDestTypeVideoFrameSize failed %d times.", i); } } else { m_pHostApi->Debug(RECORD_LOG_INFO, "GetRecordVideoFrameSize return, user stop."); break; } } else { if (i > 0) { 
m_pHostApi->Debug(RECORD_LOG_DEBUG, "get video framesize success, and failed times is %d.", i); } bRet = true; break; } } //if (bRet) { // m_pHostApi->Debug(RECORD_LOG_DEBUG, "get video framesize is %d, and width is %d, height is %d.", m_iVideoFrameSize, m_iWidth, m_iHeight); //} return bRet; } bool libvideorecord_impl::GetRecordAudioFrameSize() { bool bRet = false; for (int i = 0; i < RECORD_FAILED_MAX_TIMES; i++) { m_iAudioFrameSize = m_audioqueue->GetFrameSize(); if (m_iAudioFrameSize <= 0) { if (Rvc_Timeout(10)) { if (0 != i && 0 == i % (RECORD_FAILED_MAX_TIMES / 10)) { m_pHostApi->Debug(RECORD_LOG_ERROR, "get local audio frame size failed %d times.", i); } } else { m_pHostApi->Debug(RECORD_LOG_INFO, "GetRecordAudioFrameSize return, user stop."); break; } } else { if (i > 0) { m_pHostApi->Debug(RECORD_LOG_DEBUG, "get audio framesize success, and failed times is %d.", i); } bRet = true; break; } } //if (bRet) { // m_pHostApi->Debug(RECORD_LOG_DEBUG, "local audio frame size = %d.", m_iAudioFrameSize); //} return bRet; } bool libvideorecord_impl::GetRecordAudioParams(audio_frame** paudio) { bool bRet = false; (*paudio)->data = new char[m_iAudioFrameSize]; for (int i = 0; i < RECORD_FAILED_MAX_TIMES; i++) { if (!m_audioqueue->GetAudio(*paudio)) { if (Rvc_Timeout(10)) { if (0 != i && 0 == i % (RECORD_FAILED_MAX_TIMES / 10)) { m_pHostApi->Debug(RECORD_LOG_ERROR, "GetAudio failed %d times.", i); } } else { m_pHostApi->Debug(RECORD_LOG_INFO, "GetAudio return, user stop."); break; } } else { bRet = true; break; } } RECORD_SAFE_DELETE((*paudio)->data); //if (bRet) { // m_pHostApi->Debug(RECORD_LOG_DEBUG, "audio samplespersec=%d,framesize=%d,channels=%d,format=%d,bitspersample=%d,series number=%d.", (*paudio)->samplespersec, (*paudio)->framesize, (*paudio)->nchannels, (*paudio)->format, (*paudio)->bitspersample, (*paudio)->iseriesnumber); //} return bRet; } bool libvideorecord_impl::InitRecordVideoFrameBuffer() { bool bRet = false; if (m_videoframe->data == NULL) { 
m_videoframe->data = new unsigned char[m_iVideoFrameSize]; m_videoframe->framesize = m_iVideoFrameSize; m_videoframe->height = m_iHeight; m_videoframe->width = m_iWidth; m_videoframe->format = VIDEO_FORMAT_RGB24; bRet = true; } return bRet; } bool libvideorecord_impl::CalcuRecordAudioParams(audio_frame* paudio) { bool bRet = false; //计算每秒音频的长度 m_iAudioPerSecBufLen = paudio->samplespersec * paudio->bitspersample / 8; m_iRecordAudioBufSize = m_iAudioPerSecBufLen; //m_pHostApi->Debug(RECORD_LOG_DEBUG, "m_iAudioPerSecBufLen=%d", m_iAudioPerSecBufLen); //计算音频帧的帧频, m_iLocalAudioFps = m_iAudioPerSecBufLen / paudio->framesize; //m_pHostApi->Debug(RECORD_LOG_DEBUG, "音频帧的帧频 m_iLocalAudioFps=%d", m_iLocalAudioFps); if (eSingleSide != m_eRecordType) { if (eStand2Agent == m_eRecordType) { //可视柜台渠道的双向录像,本地端和坐席端声音采用左右声道方式录制 m_iRemoteAudioFps = 50; m_iRecordAudioBufSize = m_iAudioPerSecBufLen * 2; } else { m_iRemoteAudioFps = 0; } m_pHostApi->Debug(RECORD_LOG_DEBUG, "calc remote audio fps = %d", m_iRemoteAudioFps); } else { if (2 == m_iAudioChannels) { m_iRecordAudioBufSize = m_iAudioPerSecBufLen * 2; } } //m_pHostApi->Debug(RECORD_LOG_DEBUG, "audio fps = %d, record audio buffer size = %d.", m_iLocalAudioFps, m_iRecordAudioBufSize); bRet = true; return bRet; } bool libvideorecord_impl::InitRecordAudioBuffer() { bool bRet = false; if (m_pRecordAudioBuffer == NULL) { m_pRecordAudioBuffer = new char[m_iRecordAudioBufSize]; memset(m_pRecordAudioBuffer, 0, m_iRecordAudioBufSize); } if (eSingleSide != m_eRecordType) { if (m_pRemoteAudioBuffer == NULL) { m_pRemoteAudioBuffer = new char[m_iAudioPerSecBufLen]; memset(m_pRemoteAudioBuffer, 0, m_iAudioPerSecBufLen); } } bRet = true; return bRet; } int libvideorecord_impl::GetSingleSideVideoFrameSize(int& iWidth, int& iHeight) { int iSize = 0; if (m_env_videoqueue == NULL) { m_pHostApi->Debug(RECORD_LOG_ERROR, "本地视频队列未初始化,不存在!"); return iSize; } int env_width = 0; int env_height = 0; if (m_opt_videoqueue) { int opt_width = 0; int opt_height = 
0; iSize = m_env_videoqueue->GetFrameSize(env_width, env_height); if (iSize) { (env_width > env_height) ? (iWidth = iHeight = env_width) : (iWidth = iHeight = env_height); } else { iSize = m_opt_videoqueue->GetFrameSize(opt_width, opt_height); (opt_width > opt_height) ? (iWidth = iHeight = opt_width) : (iWidth = iHeight = opt_height); } if (!m_SubtitleParam.bSubtitle || !m_SubtitleParam.bSubtitleSection) { iSize = iWidth * iHeight * 3; } else { iHeight += 50; iSize = iWidth * iHeight * 3; } } else { if (!m_SubtitleParam.bSubtitle || !m_SubtitleParam.bSubtitleSection) { iSize = m_env_videoqueue->GetFrameSize(iWidth, iHeight); } else { iSize = m_env_videoqueue->GetFrameSize(env_width, env_height); iWidth = env_width; iHeight = env_height + 50; iSize = iWidth * iHeight * 3; } } return iSize; } int libvideorecord_impl::GetDoubleSideVideoFrameSize(int& iWidth, int& iHeight) { int iSize = 0; if (m_env_videoqueue == NULL) { m_pHostApi->Debug(RECORD_LOG_ERROR, "本地视频队列未初始化,不存在!"); return iSize; } //如果远端队列不为空,且录制双向视频 if (m_remote_videoqueue) { if (eStand2Agent == m_eRecordType) //大机以终端的视频大小确定画布,终端为640*360,远端固定为320*240,则画布为640*640+320*240 = 960*640 { //计算画布左边部分的分辨率,640*640 int env_width = 0; int env_height = 0; int opt_width = 0; int opt_height = 0; iSize = m_env_videoqueue->GetFrameSize(env_width, env_height); if (iSize){ (env_width > env_height) ? (iWidth = iHeight = env_width) : (iWidth = iHeight = env_height); } else{ iSize = m_opt_videoqueue->GetFrameSize(opt_width, opt_height); (opt_width > opt_height) ? 
(iWidth = iHeight = opt_width) : (iWidth = iHeight = opt_height); } if (iWidth > 0 && iHeight > 0) { //远端视频320*240 iWidth = iWidth + 480; if (!m_SubtitleParam.bSubtitle || !m_SubtitleParam.bSubtitleSection) { iSize = iWidth * iHeight * 3; } else { iHeight += 50; iSize = iWidth * iHeight * 3; } if (iSize > 0) { int remote_width = 0; int remote_height = 0; int iRemoteSize = m_remote_videoqueue->GetFrameSize(remote_width, remote_height); if (0 == iRemoteSize) { //m_pHostApi->Debug(RECORD_LOG_ERROR, "get remote video frame size failed!"); iSize = 0; } } } } } return iSize; } //从指定队列读取video bool libvideorecord_impl::GetVideoFrame(video_frame* Video, int flags, Clibvideoqueue* queue) { return queue->GetVideo2(Video, flags); } int libvideorecord_impl::GetDestTypeVideoFrameData(videoq_frame* Video, int flags, const bool bSwitchCam, int iInitCam, eRvcRecordType eRecordType) { int iRet = 0; if (m_opt_videoqueue) { iRet = GetDoubleCameraVideoFrameData(Video, flags, bSwitchCam, iInitCam, eRecordType); } else { iRet = GetSingleCameraVideoFrameData(Video, flags, eRecordType); } return iRet; } int libvideorecord_impl::GetSingleCameraVideoFrameData(videoq_frame* Video, int flags, eRvcRecordType eRecordType) { int iRet = 0; if (eSingleSide == m_eRecordType) { if (!m_SubtitleParam.bSubtitle || !m_SubtitleParam.bSubtitleSection){ return m_env_videoqueue->GetVideo(Video, flags) ? 
0 : -1; } else{ bool bRslt = false; int width = 0; int height = 0; m_env_videoqueue->GetFrameSize(width, height); memset(Video->data, 0, Video->framesize); videoq_frame* tmp_frm = new videoq_frame; tmp_frm->data = Video->data + Video->width * 30 * 3; bRslt = m_env_videoqueue->GetVideo(tmp_frm, flags); if (!bRslt){ delete tmp_frm; m_pHostApi->Debug(RECORD_LOG_ERROR, "get env video fail!"); return -1; } delete tmp_frm; } } else { m_pHostApi->Debug(RECORD_LOG_DEBUG, "Invalid record type, the device type cannot record both way video."); return -4; } return iRet; } //获取env摄像头数据失败返回-1 //获取opt摄像头数据失败返回-2 //获取远端视频数据失败返回-3 int libvideorecord_impl::GetStand2SVideoFrameData(videoq_frame* Video, int flags, const bool bSwitchCam, int iInitCam, eRvcRecordType eRecordType) { if (!m_remote_videoqueue) { m_pHostApi->Debug(RECORD_LOG_ERROR, "remote video queue is null!"); return -4; } //将大机摄像头和远端视频拼接到一张画布中 memset(Video->data, 0, Video->framesize); bool bRslt = false; int width = 0; int height = 0; m_env_videoqueue->GetFrameSize(width, height); video_frame localtmp_frm = { 0 }; localtmp_frm.data[0] = new unsigned char[640 * 640 * 3]; localtmp_frm.linesize[0] = 640 * 3; localtmp_frm.format = m_videoframe->format; localtmp_frm.width = 640; localtmp_frm.height = 640; memset(localtmp_frm.data[0], 0, 640 * 640 * 3); int nActiveCam = iInitCam; if (bSwitchCam){ nActiveCam = m_pHostApi->GetRecordCamera(); } if (nActiveCam == 0) {// get env snapshot videoq_frame* tmp_frm = new videoq_frame; tmp_frm->data = localtmp_frm.data[0] + width * (width - height) / 2 * 3; //在画布中加上偏移量,横向摄像头拼接 bRslt = m_env_videoqueue->GetVideo(tmp_frm, flags); if (!bRslt) { delete tmp_frm; m_pHostApi->Debug(RECORD_LOG_ERROR, "stand device both way video, get env video fail!"); return -1; } //else { // m_pHostApi->Debug(RECORD_LOG_DEBUG, "stand device both way video, get env video success, and env video frame id is %d.", tmp_frm->iframeid); //} delete tmp_frm; } else if (nActiveCam == 1) {// get opt snapshot 
videoq_frame* tmp_frm = new videoq_frame; tmp_frm->data = localtmp_frm.data[0] + (width - height) / 2 * 3; //在画布中加上偏移量 //竖向摄像头图像,拼接 bRslt = m_opt_videoqueue->GetVideo3(tmp_frm, flags); if (!bRslt){ delete tmp_frm; m_pHostApi->Debug(RECORD_LOG_ERROR, "stand device both way video, get opt video fail!"); return -2; } //else { // m_pHostApi->Debug(RECORD_LOG_DEBUG, "stand device both way video, get opt video success, and opt video frame id is %d.", tmp_frm->iframeid); //} delete tmp_frm; } else{ m_pHostApi->Debug(RECORD_LOG_DEBUG, "nActiveCam value is not valid, and it's value is %d.", nActiveCam); return -4; } { //本地录像需要偏移量 int nOffset = 0; if (!m_SubtitleParam.bSubtitle || !m_SubtitleParam.bSubtitleSection){ nOffset = 0; } else{ nOffset = 25 * Video->width * 3; } #ifdef _WIN32 for (int i = 0; i < localtmp_frm.height && i < Video->height; i++){ memcpy(Video->data + nOffset + i * Video->width * 3, localtmp_frm.data[0] + i * localtmp_frm.linesize[0], localtmp_frm.linesize[0]); } #else int imaxheight = localtmp_frm.height > Video->height ? 
Video->height : localtmp_frm.height; if (0 == nActiveCam) { for (int i = imaxheight - 1, j = 0; i >= 0 && j < imaxheight; i--, j++){ memcpy(Video->data + nOffset + j * Video->width * 3, localtmp_frm.data[0] + i * localtmp_frm.linesize[0], localtmp_frm.linesize[0]); } } else { for (int i = 0; i < imaxheight; i++){ memcpy(Video->data + nOffset + i * Video->width * 3, localtmp_frm.data[0] + i * localtmp_frm.linesize[0], localtmp_frm.linesize[0]); } } #endif } delete localtmp_frm.data[0]; //拼接远端视频,合成一个图像 { int w = 0, h = 0; int nsize = GetVideoFrameSize(w, h, m_remote_videoqueue); video_frame tmp_frm = { 0 }; tmp_frm.data[0] = new unsigned char[nsize]; tmp_frm.linesize[0] = w * 3; tmp_frm.format = m_videoframe->format; tmp_frm.width = w; tmp_frm.height = h; bool bGetRemotevideo = false; bGetRemotevideo = GetVideoFrame(&tmp_frm, 1, m_remote_videoqueue); if (bGetRemotevideo){ #ifdef _WIN32 for (int i = 0; i < tmp_frm.height && i < Video->height; i++){ memcpy(Video->data + ((Video->height - tmp_frm.height) / 2 + i) * Video->width * 3 + 640 * 3, tmp_frm.data[0] + i * tmp_frm.linesize[0], tmp_frm.linesize[0]); } #else int imaxheight = tmp_frm.height > Video->height ? 
Video->height : tmp_frm.height; for (int i = imaxheight - 1, j = 0; i >= 0 && j < imaxheight; i--, j++){ memcpy(Video->data + ((Video->height - tmp_frm.height) / 2 + j) * Video->width * 3 + 640 * 3, tmp_frm.data[0] + i * tmp_frm.linesize[0], tmp_frm.linesize[0]); } #endif } delete tmp_frm.data[0]; if (!bGetRemotevideo){ m_pHostApi->Debug(RECORD_LOG_ERROR, "remote video queue get video failed!"); return -3; } //else { // m_pHostApi->Debug(RECORD_LOG_DEBUG, "stand device both way video, remote video queue get video success, and remote video frame id is %d.", tmp_frm.iframeid); //} } return 0; } int libvideorecord_impl::GetSingleSideVideoFrameData(videoq_frame* Video, int flags, const bool bSwitchCam, int iInitCam) { bool bRslt = false; int iwidth = 0; int iheight = 0; m_env_videoqueue->GetFrameSize(iwidth, iheight); memset(Video->data, 0, Video->framesize); int nActiveCam = iInitCam; if (bSwitchCam) { nActiveCam = m_pHostApi->GetRecordCamera(); } if (0 == nActiveCam) { // get env snapshot videoq_frame* tmp_frm = new videoq_frame; if (!m_SubtitleParam.bSubtitle || !m_SubtitleParam.bSubtitleSection) { tmp_frm->data = Video->data + Video->width * (iwidth - iheight) / 2 * 3; } else { tmp_frm->data = Video->data + Video->width * ((iwidth - iheight) / 2 + 30) * 3; } bRslt = m_env_videoqueue->GetVideo(tmp_frm, flags); delete tmp_frm; if (!bRslt) { m_pHostApi->Debug(RECORD_LOG_DEBUG, "get env video fail!"); return -1; } } else if (1 == nActiveCam) { // get opt snapshot videoq_frame* tmp_frm = new videoq_frame; m_opt_videoqueue->GetFrameSize(iwidth, iheight); if (!m_SubtitleParam.bSubtitle || !m_SubtitleParam.bSubtitleSection) { tmp_frm->data = Video->data + (iheight - iwidth) / 2 * 3; } else { tmp_frm->data = Video->data + (Video->width * 30 + (iheight - iwidth) / 2) * 3; } //横向摄像头图像,拼接 bRslt = m_opt_videoqueue->GetVideo3(tmp_frm, flags); delete tmp_frm; if (!bRslt) { m_pHostApi->Debug(RECORD_LOG_DEBUG, "get opt video fail!"); return -2; } } else { 
m_pHostApi->Debug(RECORD_LOG_DEBUG, "nActiveCam value is not valid, and it's value is %d.", nActiveCam);
        return -1;
    }
    return 0;
}

// Dispatches a frame grab to the stand-to-agent (two-way) path or the
// single-side path depending on the record type. Returns the callee's result,
// or -1 for an unknown record type.
int libvideorecord_impl::GetDoubleCameraVideoFrameData(videoq_frame* Video, int flags, const bool bSwitchCam, int iInitCam, eRvcRecordType eRecordType)
{
    int iRet = 0;
    if (eStand2Agent == m_eRecordType) {
        // remote queue in use and recording two-way video
        iRet = GetStand2SVideoFrameData(Video, flags, bSwitchCam, iInitCam, eRecordType);
    }
    else if(eSingleSide == eRecordType){
        iRet = GetSingleSideVideoFrameData(Video, flags, bSwitchCam, iInitCam);
    }
    else {
        m_pHostApi->Debug(RECORD_LOG_ERROR, "invalid record type.");
        iRet = -1;
    }
    return iRet;
}

// Maps the host camera-state character onto a "the camera we are recording
// from has failed" verdict. State chars seen here: 'E' (env error), 'O' (opt
// error), 'B' (both) — exact semantics come from the host API.
// For eStand2Agent: cam 0 fails on 'E'/'B'; any other cam fails on 'O'/'B'/'E'.
// For other record types only 'B' counts as failure.
bool libvideorecord_impl::IsRecordingCamError(int iRecordingCam, eRvcRecordType eRecordType)
{
    bool bRet = false;
    if (eStand2Agent == eRecordType) {
        if (0 == iRecordingCam) {
            if ('E' == m_pHostApi->GetCameraState() || 'B' == m_pHostApi->GetCameraState()) {
                bRet = true;
            }
        }
        else {
            if ('O' == m_pHostApi->GetCameraState() || 'B' == m_pHostApi->GetCameraState() || 'E' == m_pHostApi->GetCameraState()) {
                bRet = true;
            }
        }
    }
    else {
        if ('B' == m_pHostApi->GetCameraState()) {
            bRet = true;
        }
    }
    return bRet;
}

// Locates a system font file (msyh.* on Windows under C:\Windows\Fonts,
// wryh.* under ./ elsewhere) and builds the CvxText subtitle renderer with it.
// Returns true on success; on failure logs eFontNULL when subtitles were
// requested and no renderer exists.
// NOTE(review): if called twice a previously allocated m_pText would leak —
// confirm the caller only invokes this once.
bool libvideorecord_impl::InitCvxText()
{
    bool bRet = false;
#ifdef _WIN32
    string TtcDir = "C:\\Windows\\Fonts\\";
#else
    string TtcDir = "./";
#endif
    string fontfile = "";
#ifdef _WIN32
    if (FindMatchedFile(TtcDir.c_str(), "msyh.", fontfile))
#else
    if (FindMatchedFile(TtcDir.c_str(), "wryh.", fontfile))
#endif
    {
        //m_pHostApi->Debug(RECORD_LOG_INFO, "Fontfile:%s", fontfile.c_str());
        TtcDir += fontfile;
        m_pText = new CvxText(TtcDir.c_str());
        int type = 0;
#ifdef _WIN32
        CvScalar scalar = { 12, 0.2, 0.1, 0 }; // font size / blank ratio / spacing ratio / rotation angle
#else
        CvScalar scalar = { 16, 0.3, 0.2, 0 }; // font size / blank ratio / spacing ratio / rotation angle
#endif
        bool underline = false;
        float diaphaneity = 1.0;
        m_pText->setFont(&type, &scalar, &underline, &diaphaneity); // font type / size / underline / transparency
        bRet = true;
    }
    else {
        if (!m_pText) {
            if (m_SubtitleParam.bSubtitle) {
                LogFailedEvent(eFontNULL, "Font is null, subtitles cannot be added!");
            }
        }
    }
    return bRet;
}

// Builds the full output path for the current section into m_VideoFileName:
// "<path><name>_<serial>.<fmt>" normally, or "..._end.<fmt>" when session
// management splits sections (the END suffix marks an unfinished section).
void libvideorecord_impl::GetVideoFullName()
{
    if (m_bWholeSection || !m_bSessionManage) {
        rvc_snprintf(m_VideoFileName, MAX_PATH, "%s%s_%d.%s", m_PathName, m_FileName, m_iFileSerialNum, m_VideoFomat);
    }
    else {
        rvc_snprintf(m_VideoFileName, MAX_PATH, "%s%s_%d_end.%s", m_PathName, m_FileName, m_iFileSerialNum, m_VideoFomat);
    }
    m_pHostApi->Debug(RECORD_LOG_DEBUG, "m_VideoFileName = %s", m_VideoFileName);
}

// Burns subtitles/watermarks into a BGR24 frame in place: a top line (either
// the caller-supplied topSubtitleData or a generated local timestamp), up to
// two bottom lines, and agent info on the right half of a stitched frame.
// Windows flips the image before/after drawing; the Linux path draws on the
// buffer as-is. Returns true.
// NOTE(review): Windows _snprintf does not NUL-terminate on truncation —
// timeSubtitle is safe only because the formats are far shorter than MAX_PATH.
bool libvideorecord_impl::AddCvxText(unsigned char** pData, int iDataSize, int iWidth, int iHeight)
{
    bool bRet = false;
#ifdef _WIN32
    char timeSubtitle[MAX_PATH] = { 0 };
    const char* weekDict[] = { "日","一","二","三","四","五","六" };
    SYSTEMTIME st;
    IplImage* pImg = cvCreateImage(cvSize(iWidth, iHeight), IPL_DEPTH_8U, 3);
    memcpy(pImg->imageData, *pData, iDataSize);
    cvFlip(pImg); // flip the image first
    if (strlen(m_SubtitleParam.topSubtitleData) <= 0) {
        if (m_SubtitleParam.bSubtitleSection) {
            GetLocalTime(&st);
            _snprintf(timeSubtitle, MAX_PATH, "%4d年%02d月%02d日 星期%s %02d:%02d:%02d", st.wYear, st.wMonth, st.wDay, weekDict[st.wDayOfWeek], st.wHour, st.wMinute, st.wSecond);
        }
        else {
            GetLocalTime(&st);
            _snprintf(timeSubtitle, MAX_PATH, "%4d/%02d/%02d %02d:%02d:%02d", st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
        }
        m_pText->putText(pImg, timeSubtitle, cvPoint(10, 12));
    }
    else {
        m_pText->putText(pImg, m_SubtitleParam.topSubtitleData, cvPoint(10, 12));
    }
    if (strlen(m_SubtitleParam.bottomSubtitleData1) > 0) {
        m_pText->putText(pImg, m_SubtitleParam.bottomSubtitleData1, cvPoint(10, pImg->height - 17));
    }
    if (strlen(m_SubtitleParam.bottomSubtitleData2) > 0) {
        m_pText->putText(pImg, m_SubtitleParam.bottomSubtitleData2, cvPoint(10, pImg->height - 3));
    }
    // add agent info onto the right-side (stitched) video
    if (strlen(m_SubtitleParam.strRightAgentInfo) > 0) {
        if (eStand2Agent == m_eRecordType) {
            m_pText->putText(pImg, m_SubtitleParam.strRightAgentInfo, cvPoint(10 + 640, pImg->height - 17));
        }
        else {
            m_pText->putText(pImg, m_SubtitleParam.strRightAgentInfo,
cvPoint(10 + 320, pImg->height - 17));
        }
    }
    cvFlip(pImg); // flip back after subtitles are drawn
#else
    // Linux path: wide-char subtitles, no flip (frame orientation differs).
    wchar_t timeSubtitle[MAX_PATH] = { 0 };
    const char* weekDict[] = { "日","一","二","三","四","五","六" };
    IplImage* pImg = cvCreateImage(cvSize(iWidth, iHeight), IPL_DEPTH_8U, 3);
    memcpy(pImg->imageData, *pData, iDataSize);
    //cvFlip(pImg); // flip the image first
    if (strlen(m_SubtitleParam.topSubtitleData) <= 0) {
        struct tm* pst = NULL;
        time_t t = time(NULL);
        pst = localtime(&t);
        if (m_SubtitleParam.bSubtitleSection) {
            swprintf(timeSubtitle, MAX_PATH, L"%4d年%02d月%02d日 星期%s %02d:%02d:%02d", pst->tm_year + 1900, pst->tm_mon + 1, pst->tm_mday, weekDict[pst->tm_wday], pst->tm_hour, pst->tm_min, pst->tm_sec);
        }
        else {
            swprintf(timeSubtitle, MAX_PATH, L"%4d/%02d/%02d %02d:%02d:%02d", pst->tm_year + 1900, pst->tm_mon + 1, pst->tm_mday, pst->tm_hour, pst->tm_min, pst->tm_sec);
        }
        m_pText->putText(pImg, timeSubtitle, cvPoint(10, 42));
        //m_pHostApi->Debug(RECORD_LOG_INFO, "水印信息为:%s.", timeSubtitle);
    }
    else {
        m_pText->putText(pImg, m_SubtitleParam.topSubtitleData, cvPoint(10, 42));
    }
    if (wcslen(m_SubtitleParam.bottomSubtitleData1) > 0) {
        m_pText->putText(pImg, m_SubtitleParam.bottomSubtitleData1, cvPoint(10, pImg->height - 47));
    }
    if (wcslen(m_SubtitleParam.bottomSubtitleData2) > 0) {
        m_pText->putText(pImg, m_SubtitleParam.bottomSubtitleData2, cvPoint(10, pImg->height - 23));
    }
    // add agent info onto the right-side (stitched) video
    if (wcslen(m_SubtitleParam.strRightAgentInfo) > 0) {
        if (eStand2Agent == m_eRecordType) {
            m_pText->putText(pImg, m_SubtitleParam.strRightAgentInfo, cvPoint(10 + 640, pImg->height - 47));
        }
        else {
            m_pText->putText(pImg, m_SubtitleParam.strRightAgentInfo, cvPoint(10 + 320, pImg->height - 47));
        }
    }
#endif
    memcpy(*pData, pImg->imageData, iDataSize);
    cvReleaseImage(&pImg);
    bRet = true;
    return bRet;
}

// Handles section/file rollover when one section finishes. With session
// management on, renames files so the last section of a session carries the
// "_end" suffix and notifies the host via OnASectionFinished(); otherwise it
// just reports the finished section and bumps the serial number.
// Always returns true.
bool libvideorecord_impl::ReNameVideo()
{
    bool bRet = false;
    if (m_bSessionManage) {
        // session id changed: rename the recording files
        if (m_bReNameVideo) {
            // If the current serial number is > 0, the previous file belongs
            // to the same session: rename the previous file with the END
            // suffix; otherwise only the current file is renamed.
            if (m_iFileSerialNum > 0)
            {
//sessionid切换,前一个文件需要加上END char m_NewFileName[MAX_PATH] = { 0 }; char m_OldFileName[MAX_PATH] = { 0 }; string FileName(m_VideoFileName); string name = FileName.substr(0, FileName.find_first_of('_') + 1); //m_pHostApi->Debug("renamefile1 name = %s",name.c_str()); //前一个文件名 rvc_snprintf(m_OldFileName, MAX_PATH, "%s%d.%s", name.c_str(), m_iFileSerialNum - 1, m_VideoFomat); rvc_snprintf(m_NewFileName, MAX_PATH, "%s%d_end.%s", name.c_str(), m_iFileSerialNum - 1, m_VideoFomat); m_pHostApi->Debug(RECORD_LOG_DEBUG, "renamefile1 from %s to %s", m_OldFileName, m_NewFileName); ReNameFile(m_OldFileName, m_NewFileName); } //sessionid切换,当前文件需要重命名 char m_NewFileName[MAX_PATH] = { 0 }; m_iFileSerialNum = 0; rvc_snprintf(m_NewFileName, MAX_PATH, "%s%s_%d.%s", m_PathName, m_FileName, m_iFileSerialNum, m_VideoFomat); m_pHostApi->Debug(RECORD_LOG_DEBUG, "renamefile2 from %s to %s", m_VideoFileName, m_NewFileName); if (ReNameFile(m_VideoFileName, m_NewFileName)) { m_pHostApi->OnASectionFinished(m_NewFileName, m_iFileSerialNum, false); m_iFileSerialNum++; } m_bReNameVideo = false; } else { //继续录像,删除当前文件的"END" char m_NewFileName[MAX_PATH] = { 0 }; rvc_snprintf(m_NewFileName, MAX_PATH, "%s%s_%d.%s", m_PathName, m_FileName, m_iFileSerialNum, m_VideoFomat); m_pHostApi->Debug(RECORD_LOG_DEBUG, "renamefile3 from %s to %s", m_VideoFileName, m_NewFileName); ReNameFile(m_VideoFileName, m_NewFileName); m_pHostApi->OnASectionFinished(m_NewFileName, m_iFileSerialNum, false); //WMV文件的序列号 m_iFileSerialNum++; } //新文件增加END rvc_snprintf(m_VideoFileName, MAX_PATH, "%s%s_%d_end.%s", m_PathName, m_FileName, m_iFileSerialNum, m_VideoFomat); m_pHostApi->Debug(RECORD_LOG_DEBUG, "generate new file name %s", m_VideoFileName); m_bCloseVideo = false; } else { //WMV文件的序列号 m_pHostApi->OnASectionFinished(m_VideoFileName, m_iFileSerialNum, false); m_iFileSerialNum++; rvc_snprintf(m_VideoFileName, MAX_PATH, "%s%s_%d.%s", m_PathName, m_FileName, m_iFileSerialNum, m_VideoFomat); } bRet = true; return bRet; } bool 
libvideorecord_impl::InitAudioQueue()
{
    // Open each audio queue whose name was configured; unnamed queues stay NULL.
    if (strlen(m_audioqueuename) > 0) {
        m_local_audioqueue = new Clibaudioqueue(m_audioqueuename);
    }
    else {
        m_local_audioqueue = NULL;
    }
    if (strlen(m_salesaudioqueuename) > 0) {
        m_sales_audioqueue = new Clibaudioqueue(m_salesaudioqueuename);
    }
    else {
        m_sales_audioqueue = NULL;
    }
    if (strlen(m_remoteaudioqueuename) > 0) {
        m_remote_audioqueue = new Clibaudioqueue(m_remoteaudioqueuename);
    }
    else {
        m_remote_audioqueue = NULL;
    }
    return true;
}

// Open each video queue whose name was configured; unnamed queues stay NULL.
bool libvideorecord_impl::InitVideoQueue()
{
    if (strlen(m_env_videoqueuename) > 0) {
        m_env_videoqueue = new Clibvideoqueue(m_env_videoqueuename);
    }
    else {
        m_env_videoqueue = NULL;
    }
    if (strlen(m_opt_videoqueuename) > 0) {
        m_opt_videoqueue = new Clibvideoqueue(m_opt_videoqueuename);
    }
    else {
        m_opt_videoqueue = NULL;
    }
    if (strlen(m_remotevideoqueuename) > 0) {
        m_remote_videoqueue = new Clibvideoqueue(m_remotevideoqueuename);
    }
    else {
        m_remote_videoqueue = NULL;
    }
    return true;
}

// Reset all per-recording counters, buffers and flags to their initial state.
bool libvideorecord_impl::InitRecordParams()
{
    m_iFileSerialNum = 0;
    m_iVideoFrameSize = 0;
    m_iWidth = 0;
    m_iHeight = 0;
    m_iAudioPerSecBufLen = 0;
    m_iLocalAudioFps = 0;
    m_iRemoteAudioFps = 0;
    m_pRemoteAudioBuffer = NULL;
    m_pRecordAudioBuffer = NULL;
    m_iRecordAudioBufSize = 0;
    m_iRecordedTime = 0;
    m_iRemoteAudioNum = 0;
    m_iAudioFromQueNum = 0;
    m_iRecordVideoNum = 0;
    m_iRecordAudioNum = 0;
    m_iSubTitleTime = 0;
    m_iRemoteAudioBufLen = 0;
    m_iAudioBufferLen = 0;
    m_bStopRecord = false; // re-initialized on every record start
    m_bCloseVideo = false;
    m_bReNameVideo = false;
    m_iRemoteVideoFailedTimes = 0;
    m_bSwitchCam = true;
    return true;
}

// Thin wrapper over the ffmpeg writer initialization.
// NOTE(review): colorbit and nmaxspacing are ignored — hard-coded 24 and 6
// are passed to InitWriter instead; confirm this is intentional.
bool libvideorecord_impl::InitRecordWriter(char* filename, int width, int height, int colorbit, int nfps, int nSamplePsec, int nchannels, int nBitPerSample, int nmaxspacing, int nquality, int nOutBitRate, int iAudioType)
{
    bool bResult = false;
    bResult = m_pFFmpegWriter->InitWriter(filename, width, height, 24, nfps, nSamplePsec, nchannels, nBitPerSample, 6, nquality, nOutBitRate, (int)iAudioType);
    return bResult;
}

bool
libvideorecord_impl::StartRecordWrite()
{
    // Start the ffmpeg writer's output loop.
    bool bResult = false;
    bResult = m_pFFmpegWriter->StartWrite();
    return bResult;
}

// Stop the ffmpeg writer's output loop.
bool libvideorecord_impl::EndRecordWrite()
{
    bool bResult = false;
    bResult = m_pFFmpegWriter->StopWrite();
    return bResult;
}

// Push one raw video frame to the ffmpeg writer.
bool libvideorecord_impl::ReceiveRecordVideoData(unsigned char* pData, unsigned long len)
{
    bool bResult = false;
    bResult = m_pFFmpegWriter->ReceiveVideoData(pData, len);
    return bResult;
}

// Push one raw audio buffer to the ffmpeg writer.
bool libvideorecord_impl::ReceiveRecordAudioData(unsigned char* pData, unsigned long len)
{
    bool bResult = false;
    bResult = m_pFFmpegWriter->ReceiveAudioData(pData, len);
    return bResult;
}

// Store the agent-info watermark drawn on the right half of stitched frames.
// ulen is the copy length in bytes and must be < MAX_PATH; returns false when
// it is not (buffer left cleared).
// NOTE(review): on the wchar_t build the caller must pass a byte count, not a
// character count — confirm call sites.
#ifdef _WIN32
bool libvideorecord_impl::setRightWaterMark(const char* strWaterMark, uint32_t ulen)
#else
bool libvideorecord_impl::setRightWaterMark(const wchar_t* strWaterMark, uint32_t ulen)
#endif
{
    bool bRet = false;
    memset(m_SubtitleParam.strRightAgentInfo, 0, MAX_PATH);
    if (ulen < MAX_PATH) {
        memcpy(m_SubtitleParam.strRightAgentInfo, strWaterMark, ulen);
        bRet = true;
    }
    return bRet;
}

// Forward a printf-style message to the host logger at DEBUG level.
void libvideorecord_impl::Debug(const char* fmt, ...)
{ va_list arg; va_start(arg, fmt); m_pHostApi->vDebug(RECORD_LOG_DEBUG, fmt, arg); va_end(arg); } void libvideorecord_impl::vDebug(const char* str, va_list list) { m_pHostApi->vDebug(RECORD_LOG_DEBUG, str, list); } bool libvideorecord_impl::ContinueRecord() //继续录像 { m_pHostApi->Debug(RECORD_LOG_DEBUG, "continue record"); #ifdef _WIN32 #else InitAudioQueue(); m_audioqueue = m_local_audioqueue; InitVideoQueue(); #endif m_bPauseRecord = false; return true; } void libvideorecord_impl::ChangeFilename(const char* newfilename) { if (!m_bWholeSection) { memset(m_FileName, 0, MAX_PATH); memcpy(m_FileName, newfilename, strlen(newfilename)); m_bReNameVideo = true; } } bool libvideorecord_impl::StopVideoRecord() //退出 { m_bStopRecord = true; #ifdef _WIN32 SetEvent(m_hEventWait); WaitForSingleObject(m_hRecordThread, INFINITE); CloseHandle(m_hRecordThread); m_hRecordThread = NULL; #else sem_post(&m_semt); if (0 == pthread_join(m_nRecordthreadId, NULL)) { m_pHostApi->Debug(RECORD_LOG_DEBUG, "thread join video record thread %u success!", m_nRecordthreadId); m_nRecordthreadId = 0; } else { m_pHostApi->Debug(RECORD_LOG_INFO, "thread join video record thread failed!"); } #endif m_eRecordType = eSingleSide; return true; } bool libvideorecord_impl::PauseRecord() //暂停录像 { m_pHostApi->Debug(RECORD_LOG_DEBUG, "pause record"); m_bPauseRecord = true; return true; } bool libvideorecord_impl::StartVideoRecord(int fps, int videoquality, eVideoFormat eFormat, const SubtitleParam* subtitleParam, bool bWholeSection, bool bSessionManage, const char* pathname, int pathlength, const char* pfilename, int filenamelength, Rvc_RecordAudioParam_t* pAudioParam)//开始记录 { m_audioqueue = NULL; InitAudioQueue(); InitVideoQueue(); memset(m_FileName, 0, MAX_PATH); memset(m_PathName, 0, MAX_PATH); if (filenamelength == 0) { #ifdef _WIN32 SYSTEMTIME nowTime; // 系统时间结构体 GetLocalTime(&nowTime); rvc_snprintf(m_FileName, MAX_PATH, "%4d%02d%02d%02d%02d%02d", nowTime.wYear, nowTime.wMonth, nowTime.wDay, nowTime.wHour, 
nowTime.wMinute, nowTime.wSecond); #else struct tm* pst = NULL; time_t t = time(NULL); pst = localtime(&t); rvc_snprintf(m_FileName, MAX_PATH, "%4d%02d%02d%02d%02d%02d", pst->tm_year + 1900, pst->tm_mon + 1, pst->tm_mday, pst->tm_hour, pst->tm_min, pst->tm_sec); #endif } else { memcpy(m_FileName, pfilename, filenamelength); } m_eFormat = eFormat; rvc_snprintf(m_VideoFomat, MAX_PATH, "%s", RECORD_MP4_SUFFIX); if (pathlength != 0) { memcpy(m_PathName, pathname, pathlength); } m_nFps = fps; if (videoquality > 0 || videoquality <= 100) { m_videoquality = videoquality; } else { m_pHostApi->Debug(RECORD_LOG_ERROR, "warn: videoquality is illegal, will use default instead!"); m_videoquality = 75; } memset(&m_SubtitleParam, 0, sizeof(SubtitleParam)); if (subtitleParam != NULL) { memcpy(&m_SubtitleParam, subtitleParam, sizeof(SubtitleParam)); } m_bWholeSection = bWholeSection; m_bSessionManage = bSessionManage; m_eRecordType = pAudioParam->eRecordType; m_eAudioType = pAudioParam->eOutPutType; m_bIsAudioNsOn = pAudioParam->bIsNsOn; m_iNsPolicy = pAudioParam->iNsPolicy; m_bIsAudioTransOn = pAudioParam->bIsTransOn; m_iAudioChannels = pAudioParam->iAudioChannels; m_bMuteAudio = pAudioParam->bMuteAudioMode; m_audioqueue = m_local_audioqueue; if (m_sales_audioqueue && (eSingleSide == m_eRecordType || eStand2Agent == m_eRecordType)) //当大机进行双录拼接时使用本地音频队列会出现抢占导致取音频失败,此处选择使用销售音频队列 { m_audioqueue = m_sales_audioqueue; } //m_pHostApi->Debug(RECORD_LOG_INFO, "record output audio quality type is = %s, noise suppression flag is %s, noise suppression policy is %d, audio transfer flag is %s.", audio_quality_type_table[m_eAudioType], m_bIsAudioNsOn ? "true" : "false", m_iNsPolicy, m_bIsAudioTransOn ? 
"true" : "false"); #ifdef _WIN32 if (m_hRecordThread == NULL) { return StartRecord(); } else { return false; } #else if (0 != m_nRecordthreadId) { StopVideoRecord(); } sem_init(&m_semt, 0, 0); return StartRecord(); #endif } void libvideorecord_impl::LogFailedEvent(eRvcRecordFailedCase eCase, const char* pszMessage, bool bRecordDevFault) { *m_bResult = false; m_bStopRecord = true; m_pHostApi->OnRecordFailed(eCase, pszMessage, bRecordDevFault); } void libvideorecord_impl::MediaSynchronous() { int isleeptime = 0; if (m_nFps < m_iLocalAudioFps) { if (eSingleSide != m_eRecordType)//如果双向混音,则sleeptime以远端为准 { if ((m_iRemoteAudioFps != 0) && (m_iRemoteAudioFps > m_iLocalAudioFps)) { isleeptime = (int)((m_iRemoteAudioNum + 1) * 1000 / m_iRemoteAudioFps - m_iRecordedTime); } else { isleeptime = (int)((m_iAudioFromQueNum + 1) * 1000 / m_iLocalAudioFps - m_iRecordedTime); } } else { isleeptime = (int)((m_iAudioFromQueNum + 1) * 1000 / m_iLocalAudioFps - m_iRecordedTime); } } else { isleeptime = (int)((m_iRecordVideoNum + 1) * 1000 / m_nFps - m_iRecordedTime); } if (isleeptime > 0) { Rvc_Timeout(isleeptime); } } int libvideorecord_impl::WriteVideoFrame() { int iret = 0; static int nInitActiveCam = 0; bool bVideoLens = (m_opt_videoqueue == NULL) ? 
(m_env_videoqueue->GetVideoLens() > 0) : ((m_env_videoqueue->GetVideoLens() > 0) || (m_opt_videoqueue->GetVideoLens() > 0));
    if (bVideoLens) {
        bool bGetvideo = false;
        // fetch the stitched video frame data
        if (eStand2Agent == m_eRecordType) {
            if (m_bSwitchCam) {
                // Latch the active camera once per recording.
                nInitActiveCam = m_pHostApi->GetRecordCamera();
                m_pHostApi->Debug(RECORD_LOG_INFO, "Stand2S Device Double Record local use %d CAMERA_TYPE_ENV[0] CAMERA_TYPE_OPT[1] Camera.", nInitActiveCam);
                m_bSwitchCam = false;
            }
        }
        int iGetvideo = GetDestTypeVideoFrameData(m_videoframe, VIDEOQUEUE_FLAG_VERTICAL_FLIP, m_bSwitchCam, nInitActiveCam, m_eRecordType);
        //m_pHostApi->Debug(RECORD_LOG_DEBUG, "GetDestTypeVideoFrameData result = %d.", iGetvideo);
        if (0 != iGetvideo) {
            if (eStand2Agent == m_eRecordType) {
                if (IsRecordingCamError(nInitActiveCam)) {
                    LogFailedEvent(eVideoGetFailed, "remote recording camera error!");
                    return -1;
                }
                else {
                    // -3 == remote frame fetch failed: tolerate up to
                    // GET_REMOTEVIDEO_FAILED_MAX_TIMES consecutive failures.
                    if (-3 == iGetvideo){
                        if (!m_bStopRecord) {
                            m_iRemoteVideoFailedTimes++;
                            m_pHostApi->Debug(RECORD_LOG_ERROR, "Get Remote Video Frame Failed time is %d.", m_iRemoteVideoFailedTimes);
                            if (GET_REMOTEVIDEO_FAILED_MAX_TIMES > m_iRemoteVideoFailedTimes) {
                                return 0;
                            }
                            else{
                                LogFailedEvent(eRemoteVideoGetFailed, "Get Remote Video Frame Failed.", false);
                                return -1;
                            }
                        }
                    }
                }
            }
            else {
                if (IsRecordingCamError(nInitActiveCam, eSingleSide)) {
                    LogFailedEvent(eVideoGetFailed, "remote recording camera error!");
                    return -1;
                }
                else {
                    // transient failure: brief wait, let the caller retry
                    Rvc_Timeout(2);
                    return -2;
                }
            }
        }
        // add subtitles (and track the time spent doing so)
        if (m_SubtitleParam.bSubtitle) {
            unsigned int nStartTime = timeGetTime();
            AddCvxText(&m_videoframe->data, m_iVideoFrameSize, m_iWidth, m_iHeight);
            m_iSubTitleTime += (timeGetTime() - nStartTime);
        }
        if (!ReceiveRecordVideoData((unsigned char*)m_videoframe->data, m_iVideoFrameSize)) {
            // Write failed: report with the remote audio backlog for diagnosis.
            int iremote_audio_length = 0;
            if (m_remote_audioqueue) {
                iremote_audio_length = m_remote_audioqueue->GetAudioLens();
            }
            char strmsg[MAX_PATH] = { 0 };
            rvc_snprintf(strmsg, MAX_PATH, "视频流写入失败, remote_audio_length = %d, nVideoFrameSize = %d, noldvideoNum = %d", iremote_audio_length,
m_iVideoFrameSize, m_iRecordVideoNum);
            if (0 == iremote_audio_length) {
                LogFailedEvent(eRemoteAudioGetFailed, strmsg);
            }
            else {
                LogFailedEvent(eLocalAudioGetFailed, strmsg);
            }
            return -1;
        }
        else {
            m_iRecordVideoNum++;
            if (0 == m_iRecordVideoNum % 50) {
                m_pHostApi->Debug(RECORD_LOG_DEBUG, "已成功写入视频帧数为(%d)", m_iRecordVideoNum);
            }
        }
    }
    else {
        // No frames queued: distinguish camera failure from a transient gap.
        if (eStand2Agent == m_eRecordType) {
            if (IsRecordingCamError(nInitActiveCam)) {
                LogFailedEvent(eVideoGetFailed, "remote recording camera error!");
                return -1;
            }
        }
        else {
            if (IsRecordingCamError(nInitActiveCam, eSingleSide)) {
                LogFailedEvent(eVideoGetFailed, "recording camera error!");
                return -1;
            }
            else {
                Rvc_Timeout(2);
                return -2;
            }
        }
    }
    return iret;
}

// Reset all per-section counters and zero the working buffers ahead of a new
// recording section. Always returns true.
bool libvideorecord_impl::ReSetRecordParams()
{
    m_bStopRecord = false;
    m_iRecordedTime = 0;        // duration of the current section
    m_iRecordVideoNum = 0;      // video frames recorded so far
    m_iAudioFromQueNum = 0;     // audio frames taken from the queue
    m_iRemoteAudioNum = 0;      // remote audio frame count
    m_iSubTitleTime = 0;        // time spent adding subtitles in this section
    m_iRecordAudioNum = 0;      // audio frames written so far
    m_iRemoteAudioBufLen = 0;   // current length of the remote audio assembly buffer
    m_iAudioBufferLen = 0;      // current length of the record audio assembly buffer
    memset(m_videoframe->data, 0, m_iVideoFrameSize);
    memset(m_pRecordAudioBuffer, 0, m_iRecordAudioBufSize);
    if (eSingleSide != m_eRecordType) {
        if (NULL != m_pRemoteAudioBuffer) {
            memset(m_pRemoteAudioBuffer, 0, m_iAudioPerSecBufLen);
        }
    }
    return true;
}

// Pull one frame from the remote audio queue and append it to the remote
// assembly buffer. When the queue length is within bounds the frame is
// consumed (GetAudioAndDel); otherwise it is only peeked (GetAudio).
// Returns whether a frame was obtained.
bool libvideorecord_impl::GetRemoteAudioFrame(int iAudioLens)
{
    bool bGetRemoteAudio = false;
    audio_frame* RemoteAudio = new audio_frame;
    RemoteAudio->data = m_pRemoteAudioBuffer + m_iRemoteAudioBufLen;
    if (iAudioLens <= MAX_AUDIOQUEUE_LENS && iAudioLens > 1) {
        bGetRemoteAudio = m_remote_audioqueue->GetAudioAndDel(RemoteAudio);
    }
    else {
        bGetRemoteAudio = m_remote_audioqueue->GetAudio(RemoteAudio);
    }
    if (bGetRemoteAudio) {
        //if (m_bIsAudioNsOn && NULL != m_pAudioNsObj){
        //    char pOutAudio[MAX_PATH*3] = {0};
        //    int iRet = m_pAudioNsObj->NsProcess(pOutAudio,RemoteAudio->framesize, RemoteAudio->data, RemoteAudio->framesize);
        //    if (0 == iRet){
        //        memset(m_pRemoteAudioBuffer + m_iRemoteAudioBufLen, 0, RemoteAudio->framesize);
        //        memcpy(m_pRemoteAudioBuffer + m_iRemoteAudioBufLen, pOutAudio, RemoteAudio->framesize);
        //    }
        //}
        m_pHostApi->Debug(RECORD_LOG_DEBUG, "nAudioLens > 0 && nRemoteAudioLens > 0 remote audio series number is %d, channels is %d, samplespersec is %d.", RemoteAudio->iseriesnumber, RemoteAudio->nchannels, RemoteAudio->samplespersec);
        m_iRemoteAudioBufLen += RemoteAudio->framesize;
        m_iRemoteAudioNum++;
        m_pHostApi->Debug(RECORD_LOG_DEBUG, "m_iRemoteAudioNum is %d.", m_iRemoteAudioNum);
    }
    delete RemoteAudio;
    return bGetRemoteAudio;
}

// Pull one frame from the local audio queue into the record assembly buffer,
// optionally running noise suppression in place, and accept it only when its
// sample rate matches CAPTURE_CLOCK (otherwise the recording is failed with
// eSampNotSupport). Returns whether a usable frame was appended.
bool libvideorecord_impl::GetLocalAudioFrame(int iAudioLens)
{
    bool bRet = false;
    // fetch audio
    m_audioframe->data = m_pRecordAudioBuffer + m_iAudioBufferLen;
    bool bGetAudio = false;
    if (m_audioqueue && (iAudioLens <= MAX_AUDIOQUEUE_LENS)) {
        bGetAudio = m_audioqueue->GetAudioAndDel(m_audioframe);
    }
    else {
        bGetAudio = m_audioqueue->GetAudio(m_audioframe);
    }
    if (bGetAudio) {
        if (m_bIsAudioNsOn && NULL != m_pAudioNsObj) {
            // Run noise suppression and overwrite the frame in place on success.
            char pOutAudio[CAPTURE_FRAME_MAX_LEN] = { 0 };
            int iRet = m_pAudioNsObj->NsProcess(pOutAudio, CAPTURE_FRAME_MAX_LEN, m_audioframe->data, m_audioframe->framesize);
            if (0 == iRet) {
                memset(m_pRecordAudioBuffer + m_iAudioBufferLen, 0, m_audioframe->framesize);
                memcpy(m_pRecordAudioBuffer + m_iAudioBufferLen, pOutAudio, m_audioframe->framesize);
            }
        }
        if (CAPTURE_CLOCK == m_audioframe->samplespersec) {
            m_iAudioBufferLen += m_audioframe->framesize;
            m_iAudioFromQueNum++;
            //if ((0 == m_iAudioFromQueNum % 100) && 0 != m_audioframe->iseriesnumber) {
            //    m_pHostApi->Debug(RECORD_LOG_DEBUG, "current audio queue len is %d and audio series number is: %d.", iAudioLens, m_audioframe->iseriesnumber);
            //}
            bRet = true;
        }
        else {
            m_pHostApi->Debug(RECORD_LOG_ERROR, "not support audio sample rate, and audio samplespersec=%d,framesize=%d,channels=%d,format=%d,bitspersample=%d,series number=%d.", m_audioframe->samplespersec, m_audioframe->framesize, m_audioframe->nchannels, m_audioframe->format,
m_audioframe->bitspersample, m_audioframe->iseriesnumber);
            LogFailedEvent(eSampNotSupport, "not support audio sample rate.");
        }
    }
    else {
        m_pHostApi->Debug(RECORD_LOG_ERROR, "get audio from queue failed.");
    }
    return bRet;
}

// Accumulate local audio until one second's worth is buffered; in 2-channel
// mode the mono second is then expanded to interleaved stereo in place.
// Returns whether a frame was fetched this call.
bool libvideorecord_impl::GetSingleSideAudioFrame()
{
    bool bRet = false;
    // fetch audio data and merge it into one second of audio
    int nAudioLens = 0;
    if (m_audioqueue) {
        nAudioLens = m_audioqueue->GetAudioLens();
    }
    if (nAudioLens > 0) {
        if (m_iAudioBufferLen < m_iAudioPerSecBufLen) {
            bRet = GetLocalAudioFrame(nAudioLens);
        }
        if (m_iAudioBufferLen >= m_iAudioPerSecBufLen) {
            // local recording with two-channel audio
            if (2 == m_iAudioChannels) {
                if (m_iRecordAudioBufSize == m_iAudioBufferLen * 2) {
                    uint32_t uOutlen = ConstructStereoAudioData(m_iRecordAudioBufSize, m_pRecordAudioBuffer, m_iAudioBufferLen, m_audioframe->bitspersample / 8);
                    m_iAudioBufferLen = uOutlen;
                }
            }
        }
    }
    return bRet;
}

// Append iAudioFrameSize bytes of silence to the local (bLocal) or remote
// assembly buffer and bump the matching frame counter. Returns false only
// when iAudioFrameSize is 0.
bool libvideorecord_impl::AddMuteAudioFrame(bool bLocal, int iAudioFrameSize)
{
    bool bRet = false;
    if (0 == iAudioFrameSize) {
        return bRet;
    }
    if (bLocal) {
        memset(m_pRecordAudioBuffer + m_iAudioBufferLen, 0, iAudioFrameSize);
        m_iAudioBufferLen += iAudioFrameSize;
        m_iAudioFromQueNum++;
        if (0 == m_iAudioFromQueNum % 100) {
            m_pHostApi->Debug(RECORD_LOG_DEBUG, "m_iAudioFromQueNum is %d(Mute AudioFrame).", m_iAudioFromQueNum);
        }
        bRet = true;
    }
    else {
        memset(m_pRemoteAudioBuffer + m_iRemoteAudioBufLen, 0, iAudioFrameSize);
        m_iRemoteAudioBufLen += iAudioFrameSize;
        m_iRemoteAudioNum++;
        if (0 == m_iRemoteAudioNum % 100) {
            m_pHostApi->Debug(RECORD_LOG_DEBUG, "m_iRemoteAudioNum is %d(Mute Audio Frame).", m_iRemoteAudioNum);
        }
        bRet = true;
    }
    return bRet;
}

// Fill an audio_frame with the default capture parameters used for mute-mode
// recording (8 kHz, 320-byte frames, mono, 16-bit). Returns false on NULL.
bool libvideorecord_impl::InitDefaultAudioParams(audio_frame* paudio)
{
    bool bRet = false;
    if (NULL != paudio) {
        paudio->samplespersec = 8000;
        paudio->framesize = 320;
        paudio->nchannels = 1;
        paudio->format = 1;
        paudio->bitspersample = 16;
        paudio->iseriesnumber = 0;
        bRet = true;
    }
    return bRet;
}

// Collect the next chunk of audio for the writer. Single-side mode records
// local audio only (silence when muted or the queue is dry); two-way mode
// gathers local and remote audio separately, padding either side with
// silence, and interleaves them into left/right channels once both buffers
// hold a full second. (Body continues in the next chunk.)
bool libvideorecord_impl::GetRecordAudioFrame()
{
    bool bRet = false;
    // single-side records local audio only
    if (eSingleSide ==
m_eRecordType) {
        if (false == m_bMuteAudio) {
            bRet = GetSingleSideAudioFrame();
            if (!bRet) {
                // queue dry: pad with one 20 ms slice of silence (1s/50)
                bRet = AddMuteAudioFrame(true, m_iAudioPerSecBufLen / 50);
            }
        }
        else {
            bRet = AddMuteAudioFrame(true, m_iAudioPerSecBufLen / 50);
        }
    }
    else{
        // fetch audio data and merge it into one second of audio
        // NOTE(review): m_audioqueue/m_remote_audioqueue are dereferenced
        // without NULL checks here — confirm two-way mode guarantees both
        // queues were configured.
        int nAudioLens = m_audioqueue->GetAudioLens();
        int nRemoteAudioLens = m_remote_audioqueue->GetAudioLens();
        m_pHostApi->Debug(RECORD_LOG_DEBUG,"current local audio length is %d, and remote audio length is %d.", nAudioLens, nRemoteAudioLens);
        // 1. fetch local audio
        if (m_iAudioBufferLen < m_iAudioPerSecBufLen) {
            if (nAudioLens > 0) { // got local audio
                bRet = GetLocalAudioFrame(nAudioLens);
            }
            else { // local queue empty: pad with silence
                bRet = AddMuteAudioFrame(true, m_iAudioPerSecBufLen/50);
            }
        }
        // 2. fetch remote audio
        if (m_iRemoteAudioBufLen < m_iAudioPerSecBufLen) {
            if (nRemoteAudioLens > 0) {
                bRet = GetRemoteAudioFrame(nRemoteAudioLens); // fetch remote audio
            }
            else{
                bRet = AddMuteAudioFrame(false, m_iAudioPerSecBufLen / 50);
            }
        }
        // 3. remote double-record: local and agent audio go to the left/right channels
        if ((m_iRemoteAudioBufLen == m_iAudioPerSecBufLen) && (m_iAudioBufferLen == m_iAudioPerSecBufLen)) {
            uint32_t uOutlen = BindPCMAudioData(m_iRecordAudioBufSize, m_pRecordAudioBuffer, m_iAudioBufferLen, m_pRemoteAudioBuffer, m_iRemoteAudioBufLen, m_audioframe->bitspersample / 8, eLocalLeft);
            m_pHostApi->Debug(RECORD_LOG_DEBUG, "double channel buffer length is %d, local buffer len is %d, remote buffer len is %d.", m_iRecordAudioBufSize, m_iAudioBufferLen, m_iRemoteAudioBufLen);
            m_iAudioBufferLen = uOutlen;
        }
    }
    return bRet;
}

// Flush the assembled audio buffer to the writer, then reset both assembly
// buffers. Returns 0 on success; on failure logs eAudioStreamWriteFailed and
// returns -1. (Body continues in the next chunk.)
int libvideorecord_impl::WriteAudioFrame()
{
    int iRet = 0;
    if (ReceiveRecordAudioData((unsigned char*)m_pRecordAudioBuffer, m_iAudioBufferLen)) {
        m_iRecordAudioNum++;
        if (0 == m_iRecordAudioNum % 10) {
            m_pHostApi->Debug(RECORD_LOG_DEBUG, "已成功写入音频帧数为(%d), 当前音频包大小为(%d).", m_iRecordAudioNum, m_iAudioBufferLen);
        }
        m_iAudioBufferLen = 0;
        m_iRemoteAudioBufLen = 0;
        memset((void*)m_pRecordAudioBuffer, 0, m_iRecordAudioBufSize);
        if (NULL != m_pRemoteAudioBuffer) {
            memset(m_pRemoteAudioBuffer, 0, m_iAudioPerSecBufLen);
        }
    }
else {
        char strinfo[MAX_PATH] = { 0 };
        rvc_snprintf(strinfo, MAX_PATH, "音频流写入失败, m_iAudioBufferLen = %d, m_iRecordAudioNum = %d.", m_iAudioBufferLen, m_iRecordAudioNum);
        LogFailedEvent(eAudioStreamWriteFailed, strinfo);
        return -1;
    }
    return iRet;
}

// Probe frame sizes, allocate the working buffers, query/derive the audio
// parameters and initialize the ffmpeg writer. On any unrecoverable failure
// logs the matching failure event and returns false. Mute mode (or a
// single-side recording whose audio probing fails while not stopping) falls
// back to a fixed 320-byte audio frame.
bool libvideorecord_impl::InitVideoRecordParams()
{
    bool bRet = false;
    // get the video frame size
    if (!GetRecordVideoFrameSize()) {
        if (false == m_bStopRecord) {
            LogFailedEvent(eVideoGetFailed, "Get Video Frame Failed Max Times.", true);
        }
        return bRet;
    }
    // mute-mode recording
    if (m_bMuteAudio) {
        m_iAudioFrameSize = 320;
    }
    else {
        // get the audio frame size
        if (!GetRecordAudioFrameSize()) {
            if (eSingleSide == m_eRecordType) {
                if (false == m_bStopRecord) {
                    // single-side: degrade to mute recording instead of failing
                    m_bMuteAudio = true;
                    m_iAudioFrameSize = 320;
                }
                else {
                    return bRet;
                }
            }
            else {
                if (false == m_bStopRecord) {
                    LogFailedEvent(eLocalAudioGetFailed, "Get Audio Frame Failed Max Times.", true);
                }
                return bRet;
            }
        }
    }
    // init the video buffer
    InitRecordVideoFrameBuffer();
    // get the audio parameters
    audio_frame* audio = new audio_frame;
    // mute-mode recording uses fixed defaults
    if (m_bMuteAudio) {
        InitDefaultAudioParams(audio);
    }
    else {
        if (!GetRecordAudioParams(&audio)) {
            RECORD_SAFE_DELETE(audio);
            LogFailedEvent(eLocalAudioGetFailed, "Get Audio Frame Data Failed Max Times.", true);
            return bRet;
        }
    }
    CalcuRecordAudioParams(audio);
    // init the audio buffer
    InitRecordAudioBuffer();
    uint32_t uOutPutBitRate = GetAudioOutPutBitRate(audio->samplespersec, m_iAudioChannels, m_eAudioType);
    if (m_bIsAudioNsOn && NULL != m_pAudioNsObj) {
        // NOTE(review): a truthy SetNsParams return is treated as failure here
        // (NS gets disabled) — confirm SetNsParams' return convention.
        if (m_pAudioNsObj->SetNsParams(audio->samplespersec, CAPTURE_FRAME_TIME, m_iNsPolicy)) {
            m_bIsAudioNsOn = false;
            m_pHostApi->Debug(RECORD_LOG_INFO, "Ns audio set params failed, set AudioNsOn false.");
        }
    }
    //m_pHostApi->Debug(RECORD_LOG_DEBUG, "InitRecordWriter samplespersec=%d,audioOutChannels=%d,audioOutBitRate=%d bps.", audio->samplespersec, m_iAudioChannels, uOutPutBitRate);
    if (!InitRecordWriter(m_VideoFileName, m_iWidth, m_iHeight, 24, m_nFps, audio->samplespersec, m_iAudioChannels, audio->bitspersample, 6, m_videoquality, uOutPutBitRate, (int)m_eAudioType)) {
RECORD_SAFE_DELETE(audio);
        LogFailedEvent(eInitialFailed, "初始化失败!");
        return bRet;
    }
    RECORD_SAFE_DELETE(audio);
    bRet = true;
    return bRet;
}

// Release the local audio and both local video queues (non-Windows builds
// only; on Windows this is a no-op). Always returns true.
bool libvideorecord_impl::ResetMediaQueue()
{
#ifdef _WIN32
#else
    RECORD_SAFE_DELETE(m_local_audioqueue);
    RECORD_SAFE_DELETE(m_env_videoqueue);
    RECORD_SAFE_DELETE(m_opt_videoqueue);
#endif
    return true;
}