共计 13554 个字符,预计需要花费 34 分钟才能阅读完成。
前段时间 SkeyeLive 凋谢了 DirectShow 采集库,这个库底层采纳 DirectShow SDK 的接口实现音视频的预览 (播放) 和采集;很多人可能还不太理解这个封装库的回调形式和之前的 DShow 线程采集形式有什么不同,或者说对 DirectShow 的采集流程还不太熟悉,上面我将就 Windows 平台下用应用 DirectShow 的过滤器(滤波器)进行流媒体开发的前端采集局部进行简要介绍,如果大家想深刻的学习和摸索,举荐大家去看看《Visual C++ 音频 / 视频解决技术及工程实际》这本书,第 9 章有具体的流程解说。
一、枚举采集设备
应用采集设施前,须要首先确定零碎曾经装置的采集设施:视频、音频采集设施。零碎设施枚举器为按类型枚举已注册在零碎中的滤波器提供了对立的办法。而且它可能辨别不同的硬件设施,即使是同一个滤波器反对它们。这对那些应用 Windows 驱动模型、KSProxy Filter 的设施来说是十分有用的,零碎设施枚举器对它们按不同的设施实例进行看待。
当利用零碎设施枚举器查问设施的时候,零碎设施枚举器为特定类型的设施(如音频捕捉和视频压缩)生成了一张枚举表(Enumerator)。类型枚举器(Category Enumerator)为每个这种类型的设施返回一个 Moniker,类型枚举器主动把每种即插即用的设施蕴含在内。
调用规范办法 CoCreateInstance 生成零碎设施枚举器(Device Enumerator),类标识(CLSID)为 CLSID_SystemDeviceEnum,办法如下:
// Returns the (cached) list of video capture devices registered with the
// system, enumerated through the DirectShow system device enumerator.
// The list is built on the first call and reused afterwards.
CAMERA_LIST_T *CCameraDS::GetCameraList()
{
    // Already enumerated once -- return the cached list.
    if (NULL != cameraList.pCamera || cameraList.count > 0)
        return &cameraList;

    cameraList.count = 0;

    // Create the system device enumerator.
    CComPtr<ICreateDevEnum> pCreateDevEnum;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                                  IID_ICreateDevEnum, (void**)&pCreateDevEnum);
    if (FAILED(hr))  // BUGFIX: original used pCreateDevEnum without checking hr
        return &cameraList;

    // Enumerate the video-input device category.
    // NOTE: CreateClassEnumerator returns S_FALSE (not a failure code) when
    // the category is empty, so compare against NOERROR explicitly.
    CComPtr<IEnumMoniker> pEm;
    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEm, 0);
    if (hr != NOERROR)
        return &cameraList;

    pEm->Reset();

    ULONG cFetched = 0;
    IMoniker *pM = NULL;
    while (pEm->Next(1, &pM, &cFetched) == S_OK)
    {
        IPropertyBag *pBag = NULL;
        hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pBag);
        if (SUCCEEDED(hr))
        {
            // BUGFIX: allocate the node only after BindToStorage succeeded;
            // the original allocated first and leaked it on failure.
            CAMERA_INFO_T *pCameraInfo = new CAMERA_INFO_T;
            memset(pCameraInfo, 0x00, sizeof(CAMERA_INFO_T));

            VARIANT var;
            var.vt = VT_BSTR;
            hr = pBag->Read(L"FriendlyName", &var, NULL); // other properties (e.g. description) exist too
            if (hr == NOERROR)
            {
                // Convert the wide friendly name into the node's ANSI buffer.
                WideCharToMultiByte(CP_ACP, 0, var.bstrVal, -1, pCameraInfo->friendlyName,
                                    sizeof(pCameraInfo->friendlyName), "", NULL);
                SysFreeString(var.bstrVal);
            }
            pBag->Release();

            // Append the node to the tail of the singly linked list.
            cameraList.count++;
            if (NULL == cameraList.pCamera)
            {
                cameraList.pCamera = pCameraInfo;
            }
            else
            {
                CAMERA_INFO_T *pTail = cameraList.pCamera;
                while (NULL != pTail->pNext)
                    pTail = pTail->pNext;
                pTail->pNext = pCameraInfo;
            }
        }
        pM->Release();
    }
    return &cameraList;
}
这是视频设施枚举局部,当然音频也是一样的,只须要把 CreateClassEnumerator 函数的第一个参数换成 CLSID_AudioInputDeviceCategory 即可。
留神:调用 ICreateDevEnum::CreateClassEnumerator 办法生成类型枚举器,参数为用户想要失去的类的 ID(CLSID),该办法返回一个 IEnumMoniker 接口指针。如果指定的类型是空的或不存在,则函数 ICreateDevEnum::CreateClassEnumerator 将返回 S_FALSE 而不是错误代码,同时 IEnumMoniker 指针也是空的,这就要求咱们在调用 CreateClassEnumerator 的时候明确用 S_OK 进行比拟而不应用宏 SUCCEEDED;(扯远了 …)而在 SkeyeLive 中还提供了另外一种枚举音频设备的形式,那就是采纳 DirectSound 的枚举形式:DirectSoundCaptureEnumerate 这个函数来实现的,须要留神,这个函数枚举出的设施 GUID 有可能是空的,设施名称可能表象为”声卡主设施驱动“,经测试:这个设施是不能用于采集,也是不存在的,枚举过程中应该抛弃。当然,其实 DirectShow 也是封装了底层的 DirectSound 的接口来实现的 COM 接口的对立封装。
(须要重点阐明的是:枚举设施这一块不是 DShow 封装库中的代码,这是由咱们 EasyDarwin 团队的 Gavin 大神之前的 DShow 采集局部代码中提供的(前身是 EasyCamera_win),我只是鸠占鹊巢的给大家解说,向大神致敬~~~ 哈哈哈!)
二、应用 Capture Graph Builder 进行音视频采集
这个为了节约篇幅,本文以视频采集为例子进行解说,其实音频采集是截然不同的(这就是封装的益处,不必关怀底层的实现细节);
1、创建 GraphBuilder
应用 DirectShow 进行视频采集,首先,创立视频捕捉 Graph,DShow SDK 提供的是 Graph Builder 接口是 IgraphBuilder。不过针对捕捉工作(Capture),还有另一个接口 ICaptureGraphBuilder2 针对采集捕捉的增强型接口,这个接口能够提供视频捕捉预览窗口的创立和应用,而后,再创立一个媒体控制器对视频预览的播放进行管制,代码如下:
// Builds the video capture graph: the filter-graph manager, the capture
// graph builder helper, and the media-control / media-event interfaces.
// Each object is created lazily; members that already exist are kept as-is.
HRESULT CCaptureVideo::CreateCaptureGraphBuilder()
{
    HRESULT hr = NOERROR;

    // 1) Filter graph manager.
    if (m_pGraphBuilder == NULL)
    {
        hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
                              IID_IGraphBuilder, (void **)&m_pGraphBuilder);
        if (FAILED(hr))
        {
            //ERR_DEBUG("CreateCaptureGraphBuilder Create m_pGraphBuilder Failed");
            return hr;
        }
    }

    // 2) Capture graph builder (helper for wiring capture/preview streams).
    if (m_pCaptureGraphBulid == NULL)
    {
        hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
                              IID_ICaptureGraphBuilder2, (void **)&m_pCaptureGraphBulid);
        if (FAILED(hr))
        {
            //ERR_DEBUG("CreateCaptureGraphBuilder CaptureGraphBuilder2 Failed");
            return hr;
        }
        // Attach the filter graph to the capture graph builder.
        hr = m_pCaptureGraphBulid->SetFiltergraph(m_pGraphBuilder);
    }

    // 3) Media control interface (used to Run/Stop the graph).
    // NOTE(review): it is assumed one IMediaControl per graph is enough -- confirm.
    if (m_pMediaCon == NULL)
    {
        hr = m_pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&m_pMediaCon);
        if (FAILED(hr))
        {
            //ERR_DEBUG("CreateCaptureGraphBuilder QueryInterface m_pMediaCon Failed");
            return hr;
        }
    }

    // 4) Media event interface (graph event notifications).
    if (m_pMediaEvent == NULL)
    {
        hr = m_pGraphBuilder->QueryInterface(IID_IMediaEvent, (void **)&m_pMediaEvent);
        if (FAILED(hr))
        {
            //ERR_DEBUG("CreateCaptureGraphBuilder QueryInterface m_pMediaEvent Failed");
            return hr;
        }
    }
    return hr;
}
2、枚举设备并连接设备
枚举设备我们在上一节已经讲过了,这里直接查询到自己需要捕捉的设备名称,然后绑定到过滤器上即可:
// Enumerates the video capture devices and binds the device whose ordinal
// equals `deviceId` to a base filter returned through *pFilter.
// Returns TRUE only when the device was found and bound successfully.
BOOL CCaptureVideo::BindToVideoDev(int deviceId, IBaseFilter **pFilter)
{
    if (deviceId < 0 || pFilter == NULL)
        return FALSE;

    CComPtr<ICreateDevEnum> pCreateDevEnum;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                                  IID_ICreateDevEnum, (void**)&pCreateDevEnum);
    if (hr != NOERROR)
    {
        //ERR_DEBUG("Instance DeviceEnum Failed");
        return FALSE;
    }

    // S_FALSE means "empty category", so compare against NOERROR explicitly.
    CComPtr<IEnumMoniker> pEm;
    hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEm, 0);
    if (hr != NOERROR)
    {
        //ERR_DEBUG("Enum VideoInputDeviceCategory Failed");
        return FALSE;
    }
    pEm->Reset();

    BOOL bBound = FALSE;
    ULONG cFetched = 0;
    IMoniker *pM = NULL;
    int index = 0;
    while ((pEm->Next(1, &pM, &cFetched) == S_OK) && (index <= deviceId))
    {
        if (pM == NULL)
            break;
        IPropertyBag *pBag = NULL;
        hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pBag);
        if (pBag != NULL)
        {
            VARIANT var;
            var.vt = VT_BSTR;
            hr = pBag->Read(L"FriendlyName", &var, NULL);
            if (hr == NOERROR)
            {
                if (index == deviceId)
                {
                    // Bind the selected device moniker to a base filter.
                    bBound = SUCCEEDED(pM->BindToObject(0, 0, IID_IBaseFilter, (void**)pFilter));
                }
                SysFreeString(var.bstrVal);
            }
            pBag->Release();  // BUGFIX: original leaked pBag whenever Read() failed
        }
        pM->Release();
        index++;
    }
    // BUGFIX: original returned TRUE unconditionally, even when no device
    // matched deviceId or BindToObject failed.
    return bBound;
}
3、采集参数的设置
采集前须要对要采集的视频格式、图像品质进行设置,如视频的分辨率、帧率和数据格式,图像的亮度、色度和饱和度参数设置等。当然,咱们这里只针对视频的宽高,帧率和数据格式进行了设置,如果大家还想进行更多的设置,能够应用 OleCreatePropertyFrame 函数以属性页的形式对视频属性和图像参数进行配置和批改。
// Configures the video format on the preview or capture pin: colour
// subtype (from strRGBBytes), resolution and frame rate.
//   nPreview == 0 -> preview pin, otherwise -> capture pin.
// Falls back to the driver's default width/height when the requested
// size is rejected by the device.
HRESULT CCaptureVideo::SetVideoSize(int nPreview,CString strRGBBytes,int nFrameRate,int iWidth , int iHeight)
{
    HRESULT hr = E_FAIL;
    if (m_pCaptureGraphBulid == NULL)
        return hr;
    if (nFrameRate <= 0)  // BUGFIX: guard the division when computing AvgTimePerFrame
        return E_FAIL;

    // Locate the stream-config interface on the requested pin category.
    const GUID *pCategory = (nPreview == 0) ? &PIN_CATEGORY_PREVIEW : &PIN_CATEGORY_CAPTURE;
    IAMStreamConfig *pAMStreamConfig = NULL;
    hr = m_pCaptureGraphBulid->FindInterface(pCategory, &MEDIATYPE_Video,
                                             m_pBaseFilter, IID_IAMStreamConfig, (void **)&pAMStreamConfig);
    if (FAILED(hr))
    {
        SAFE_RELEASE(pAMStreamConfig);
        return hr;
    }

    // Read the current format as the starting point.
    AM_MEDIA_TYPE *pmt = NULL;
    hr = pAMStreamConfig->GetFormat(&pmt);
    // BUGFIX: original dereferenced pmt without checking GetFormat's result.
    if (FAILED(hr) || pmt == NULL || pmt->pbFormat == NULL)
    {
        SAFE_RELEASE(pAMStreamConfig);
        return FAILED(hr) ? hr : E_FAIL;
    }

    // Patch in the requested format.
    pmt->majortype = MEDIATYPE_Video;
    pmt->subtype = GetMediaTypeGuid(strRGBBytes);
    VIDEOINFOHEADER *pvih = reinterpret_cast<VIDEOINFOHEADER *>(pmt->pbFormat);

    // Remember the driver defaults so we can fall back to them.
    int nDefaultWidth = pvih->bmiHeader.biWidth;
    int nDefaultHeight = pvih->bmiHeader.biHeight;
    pvih->bmiHeader.biWidth = iWidth;
    pvih->bmiHeader.biHeight = iHeight;
    pvih->bmiHeader.biSizeImage = pmt->lSampleSize =
        iWidth * iHeight * pvih->bmiHeader.biPlanes * pvih->bmiHeader.biBitCount / 8;
    pvih->AvgTimePerFrame = (LONGLONG)(10000000 / nFrameRate);  // REFERENCE_TIME: 100ns units

    hr = pAMStreamConfig->SetFormat(pmt);
    if (FAILED(hr))
    {
        // Requested size rejected -- retry with the driver defaults.
        // (Using the defaults may cause the small preview window to flicker
        // during initialization.)
        pvih->bmiHeader.biWidth = nDefaultWidth;
        pvih->bmiHeader.biHeight = nDefaultHeight;
        pvih->bmiHeader.biSizeImage = pmt->lSampleSize =
            nDefaultWidth * nDefaultHeight * pvih->bmiHeader.biPlanes * pvih->bmiHeader.biBitCount / 8;
        hr = pAMStreamConfig->SetFormat(pmt);
        if (FAILED(hr))
        {
            SAFE_RELEASE(pAMStreamConfig);
            FreeMediaType(*pmt);
            //ERR_DEBUG("SetVideoSize: failed to set video format");
            return hr;
        }
    }
    SAFE_RELEASE(pAMStreamConfig);
    // NOTE(review): GetFormat also allocates the AM_MEDIA_TYPE struct itself;
    // DeleteMediaType(pmt) would free both -- confirm FreeMediaType alone
    // does not leak the struct here.
    FreeMediaType(*pmt);
    return hr;
}
4、创建视频渲染器
创立视频渲染器(出现器),对捕捉视频进行显示,代码如下:
// Creates the video renderer filter selected by nType and adds it to the
// filter graph:
//   0 -> no renderer (returns immediately)
//   1 -> default video renderer (CLSID_VideoRendererDefault)
//   2 -> legacy video renderer  (CLSID_VideoRenderer)
//   3 -> null renderer, i.e. capture without on-screen display
HRESULT CCaptureVideo::CreateVideoRender(int nType)
{
    HRESULT hr = NOERROR;

    // Drop any renderer created by a previous call.
    if (m_pWindowRender)
    {
        SAFE_RELEASE(m_pWindowRender);
    }

    // Map the requested renderer type to its CLSID.
    const CLSID *pRenderClsid = NULL;
    switch (nType)
    {
    case 1: pRenderClsid = &CLSID_VideoRendererDefault; break;
    case 2: pRenderClsid = &CLSID_VideoRenderer;        break;
    case 3: pRenderClsid = &CLSID_NullRenderer;         break; // no display
    default:
        // nType == 0 or unknown: no renderer requested.
        SAFE_RELEASE(m_pWindowRender);
        return NOERROR;
    }

    hr = CoCreateInstance(*pRenderClsid, 0, CLSCTX_INPROC_SERVER,
                          IID_IBaseFilter, (void **)&m_pWindowRender);
    if (FAILED(hr))
    {
        SAFE_RELEASE(m_pWindowRender);
        //ERR_DEBUG("CreateVideoRender: creating the renderer failed");
        return hr;
    }

    // Insert the renderer into the graph.
    if (m_pGraphBuilder)
    {
        hr = m_pGraphBuilder->AddFilter(m_pWindowRender, L"recv render");
        if (FAILED(hr))
        {
            SAFE_RELEASE(m_pWindowRender);
            //ERR_DEBUG("CreateVideoRender: adding the renderer to the graph failed");
            return hr;
        }
    }
    return hr;
}
5、预览采集到的视频数据
首先,初始化过滤器链路管理器,把指定采集设施的过滤器增加到链路中,而后渲染 RenderStream 办法把所有的过滤器链接起来,最初依据设定的显示窗口预览采集到的视频数据,具体实现过程如下:
// Fragment of the preview-setup routine (function header is outside this
// excerpt): creates the sample grabber, optionally a deinterlace filter,
// renders the stream through them, configures the grabber's callback mode,
// and finally obtains the video window and starts the preview.
hr = CreateCaptureSampleGrabber(m_strRGBByte); | |
if(FAILED(hr)) | |
{SAFE_RELEASE(m_pSampleGrabberFilter); | |
SAFE_RELEASE(m_pSampleGrabber); | |
//ERR_DEBUG("CreateCaptureSampleGrabber Failed"); | |
return -1; | |
} | |
// Optional deinterlacing stage.
if(m_nDeinterlace==1)//m_iHeight/m_iWidth!=(1.5/4)) | |
{CreateDeinterlaceFilter(); | |
} | |
// Wire the graph: source -> [deinterlace] -> sample grabber -> renderer.
// On failure, retry with the alternate pin category.
if(m_pVideoDeinterlaceFilter) | |
{hr = m_pCaptureGraphBulid->RenderStream(&pCategorySuc,&MEDIATYPE_Video,m_pBaseFilter,m_pVideoDeinterlaceFilter,m_pSampleGrabberFilter); | |
hr = m_pCaptureGraphBulid->RenderStream(NULL,NULL,m_pSampleGrabberFilter,NULL,m_pWindowRender); | |
if(FAILED(hr)) | |
{hr = m_pCaptureGraphBulid->RenderStream(&PIN_CATEGORY_PREVIEW,&MEDIATYPE_Video,m_pBaseFilter,m_pSampleGrabberFilter,NULL); | |
if(FAILED(hr)) | |
{//ERR_DEBUG("PrivewVideoDev RenderStream Failed"); | |
return -1; | |
} | |
} | |
} | |
else | |
{hr = m_pCaptureGraphBulid->RenderStream(&pCategorySuc,&MEDIATYPE_Video,m_pBaseFilter,m_pSampleGrabberFilter,m_pWindowRender); | |
if(FAILED(hr)) | |
{hr = m_pCaptureGraphBulid->RenderStream(&pCategoryFail,&MEDIATYPE_Video,m_pBaseFilter,m_pSampleGrabberFilter,m_pWindowRender); | |
if(FAILED(hr)) | |
{//ERR_DEBUG("PrivewVideoDev RenderStream Failed"); | |
return -1; | |
} | |
} | |
} | |
// Configure the grabber: thread (pull) mode buffers samples only;
// callback (push) mode additionally registers the BufferCB callback.
if(m_bThread) | |
{m_pSampleGrabber->SetOneShot(FALSE); | |
m_pSampleGrabber->SetBufferSamples(TRUE); | |
// m_pSampleGrabber->SetOneShot(TRUE); | |
} | |
else | |
{m_pSampleGrabber->SetBufferSamples(TRUE); | |
m_pSampleGrabber->SetOneShot(FALSE); | |
//m_nDataType: data type, 1 -- audio, 2 -- video | |
//nIndex: device ordinal: audio -1, video 0 --- N | |
m_cSampleGrabberCB.SetDataInfo(m_nIndex, m_nDataType); | |
int nMode=1;//0--SampleCB,1--BufferCB | |
m_pSampleGrabber->SetCallback(&m_cSampleGrabberCB, nMode); | |
} | |
// Obtain the video window interface unless the renderer type excludes it.
if(m_pVideoWin==NULL&&m_nRenderType!=2) | |
{ | |
hr = m_pCaptureGraphBulid->FindInterface(&pCategorySuc,&MEDIATYPE_Video,//CAPTURE | |
m_pBaseFilter,IID_IVideoWindow,(void **)&m_pVideoWin); | |
if (FAILED(hr))// if that failed, look on the other (CAPTURE) pin category | |
{ | |
hr = m_pCaptureGraphBulid->FindInterface(&pCategoryFail,&MEDIATYPE_Video,//CAPTURE | |
m_pBaseFilter,IID_IVideoWindow,(void **)&m_pVideoWin); | |
if (FAILED(hr)) | |
{//ERR_DEBUG("CreateCaptureGraphBuilder QueryInterface m_pVideoWin Failed"); | |
return -1; | |
} | |
} | |
} | |
// Set up the video display window and run the graph.
SetupVideoWindow(); | |
hr = StartPreview(); | |
if(FAILED(hr)) | |
{return -1;} |
然后,设置视频显示窗口:
// Attaches the video window to the given parent HWND (when supplied),
// turns it into a child window, resizes it to fit and makes it visible.
HRESULT CCaptureVideo::SetupVideoWindow(HWND hWnd)
{
    HRESULT hr = NOERROR;
    if (m_pVideoWin == NULL)
        return hr;

    // Remember the new owner window when one is passed in.
    if (hWnd != NULL)
        m_hWnd = hWnd;

    hr = m_pVideoWin->put_Owner((OAHWND)m_hWnd);
    // hr = m_pVideoWin->put_MessageDrain((OAHWND)m_hWnd);
    if (FAILED(hr))
    {
        //ERR_DEBUG("SetupVideoWindow put_Owner Error");
        return hr;
    }

    hr = m_pVideoWin->put_WindowStyle(WS_CHILD | SS_NOTIFY | WS_CLIPCHILDREN);
    if (FAILED(hr))
    {
        //ERR_DEBUG("SetupVideoWindow put_WindowStyle");
        return hr;
    }

    // Fit the video to the current window rectangle, then show it.
    ResizeVideoWindow();
    hr = m_pVideoWin->put_Visible(OATRUE);
    return hr;
}
最后,开始预览:
// Runs the filter graph so capture/preview starts.
// Lazily obtains the IMediaControl interface on first use; on a failed
// Run() the graph is put back into the stopped state.
HRESULT CCaptureVideo::StartPreview()
{
    HRESULT hr = NOERROR;

    // Acquire the media-control interface if we don't hold it yet.
    if (m_pMediaCon == NULL)
    {
        if (m_pGraphBuilder == NULL)  // BUGFIX: original dereferenced a possibly-NULL graph builder
            return E_POINTER;
        hr = m_pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&m_pMediaCon);
        if (FAILED(hr))
            return hr;
    }

    // Run the graph (logic was duplicated across both branches in the original).
    hr = m_pMediaCon->Run();
    if (FAILED(hr))
    {
        m_pMediaCon->Stop();
    }
    return hr;
}
至此,DShow 对视频采集的整个完整流程就完成了。
三、DShow 采集的两种模式
1、线程模式(拉模式)
线程模式采纳多线程的形式,在线程回调中调用 GetCurrentBuffer 函数获取采集缓存中的一帧数据,这里获取的数据是之前设置的色调格局的数据 (如果设置胜利的话,否则是默认格局);如果要失去指定的格局须要进行色调格局转换。
线程执行函数如下:
void CCaptureVideo::OnThreadDeal() | |
{ | |
BYTE *pData=NULL; | |
long lDatasize=0; | |
char strMediaType[24]=_T("YUY2"); | |
if(!m_strRGBByte.IsEmpty()) | |
{strcpy_s(strMediaType, 24, m_strRGBByte); | |
} | |
// 读取缓冲区数据 | |
pData=GetCurrentBuffer(lDatasize,strMediaType); | |
//TRACE("OnThreadDeal:%d\r\n",lDatasize); | |
if (m_sThreadCalbackInfo.realDataCalFunc&&m_sThreadCalbackInfo.pMaster&&pData&&lDatasize>0) | |
{ | |
// 执行数据回调 | |
m_sThreadCalbackInfo.realDataCalFunc(m_nIndex, pData, lDatasize, | |
(RealDataStreamType)m_nDataType,NULL, m_sThreadCalbackInfo.pMaster); | |
} | |
} |
2、回调模式 (推模式)
回调模式是利用 ISampleGrabber 提供的回调函数接口进行设置,该设置回调函数原型如下:
// ISampleGrabber callback registration:
//   pCallback             -- user-supplied ISampleGrabberCB implementation
//   WhichMethodToCallback -- 0 = SampleCB, 1 = BufferCB (as used above)
virtual HRESULT STDMETHODCALLTYPE ISampleGrabber::SetCallback( | |
ISampleGrabberCB *pCallback, | |
long WhichMethodToCallback) = 0; |
从函数接口咱们能够看出,第一个参数须要一个 ISampleGrabberCB 类型的参数,咱们自定义一个回调接口类:class CSampleGrabberCB:public ISampleGrabberCB,用以解决回调数据的接管及内查;
设置接口:
/* | |
nDataType: 数据类型 1 -- 音频,2-- 视频数据 | |
nIndex:设施编号:音频 -1,视频 0 ---N | |
*/ | |
void CSampleGrabberCB::SetDataInfo(int nIndex,int nDataType) | |
{ | |
m_nIndex=nIndex; | |
m_nDataType= nDataType; | |
} |
回调函数处理:
// Unified sample callback (push mode): the SampleGrabber calls this for
// every buffer. If a real-data callback is registered the buffer is
// forwarded directly; otherwise it is copied into the global g_cbInfo
// structure for later retrieval.
// NOTE(review): g_cbInfo is unsynchronized global mutable state -- confirm
// only one grabber instance (or external locking) uses this path.
STDMETHODIMP CSampleGrabberCB::BufferCB(double dblSampleTime, BYTE *pBuffer, long lBufferSize) | |
{if (!pBuffer) | |
return E_POINTER; | |
now_tick=::GetTickCount(); | |
// NOTE(review): first_tick is overwritten on every call, so the TRACE
// delta below is always 0 -- presumably it should be set only once.
first_tick = now_tick; | |
// NOTE(review): these branch labels contradict SetDataInfo's docs
// (which say 1 = audio, 2 = video) -- confirm which mapping is correct.
if (m_nDataType == 1)// labelled "video" in the original | |
{// TRACE("BufferCB:%d-%d-%d-%d\r\n", m_nIndex, m_nDataType, now_tick - first_tick, lBufferSize); | |
} | |
else if (m_nDataType == 2)// labelled "audio" in the original | |
{// TRACE("BufferCB:%d-%d-%d-%d\r\n", m_nIndex, m_nDataType, now_tick - first_tick, lBufferSize); | |
} | |
if(pBuffer==NULL||lBufferSize<=0) | |
return 0; | |
// Fast path: deliver straight to the registered real-data callback.
if (m_realDataCallback && m_pMaster) | |
{m_realDataCallback(m_nIndex, pBuffer, lBufferSize, (RealDataStreamType)m_nDataType,NULL, m_pMaster); | |
return 0; | |
} | |
// The cached buffer is too small for this sample: drop it so it gets
// reallocated at the new size below.
if(g_cbInfo.lBufferSize < lBufferSize) | |
{delete [] g_cbInfo.pBuffer; | |
g_cbInfo.pBuffer = NULL; | |
g_cbInfo.lBufferSize = 0; | |
g_cbInfo.bHaveData=FALSE; | |
} | |
// Since we can 't access Windows API functions in this callback, just | |
// copy the bitmap data to a global structure for later reference. | |
g_cbInfo.dblSampleTime = dblSampleTime; | |
// If we haven 't yet allocated the data buffer, do it now. | |
// Just allocate what we need to store the new bitmap. | |
if (!g_cbInfo.pBuffer) | |
{g_cbInfo.pBuffer = new BYTE[lBufferSize]; | |
g_cbInfo.lBufferSize = lBufferSize; | |
} | |
// NOTE(review): plain `new` throws on failure, so this NULL check is
// effectively dead code unless a non-throwing allocator is in use.
if(!g_cbInfo.pBuffer) | |
{ | |
g_cbInfo.lBufferSize = 0; | |
g_cbInfo.bHaveData=FALSE; | |
return E_OUTOFMEMORY; | |
} | |
// Copy the bitmap data into our global buffer | |
memcpy(g_cbInfo.pBuffer, pBuffer, lBufferSize); | |
g_cbInfo.bHaveData=TRUE; | |
return 0; | |
} |
两种形式相比拟,个人观点:各有优劣;在 SkeyeLive 中咱们采纳的是回调形式,过后引进这个库就是为了能在采集端保障音视频从源头是同步的,当然,其实线程模式也是能实现同步的;线程模式的长处是:采集即时性高,即需即取,简直不会有延时,毛病就是:如果呈现取数据端不及时时,如果不思考缓存的状况下可能就会呈现丢帧。而回调模式就正好相同,其长处是:稳定性高,随时都能保障取的帧是间断的,即便不做缓存也不会呈现取出来的数据呈现丢帧的状况,当然在取数据时比方编码慢(或者回调中做其余延时解决),就会呈现预览和回调同步延时的状况,回调缓存的数据量会越来越大,延时也将增大;当然,如果在多路同时采集时,甚至多路同时进行数据处理时,采纳回调模式会更显劣势!