ISampleGrabber::GetCurrentBuffer() returns E_OUTOFMEMORY - c++

I am new to DirectShow, am using the DirectShow sample "FrameGrabberDemo", and am having trouble getting the image. I tried both .avi and .mpg files; both give the same issue.
The first observation is the S_FALSE return value from IMediaControl::Run(). However, that is not an error; the documentation states:
The graph is preparing to run, but some filters have not completed the
transition to a running state.
The second observation is that ISampleGrabber::GetCurrentBuffer() returns E_OUTOFMEMORY, which is documented as "The specified buffer is not large enough." However, BitmapInfo has biSizeImage = 1244160 and the MediaType has lSampleSize = 1244160.
HRESULT CFrameGrabberDemoDlg::DoExtractFrame()
{
WCHAR wFile[MAX_PATH];
MultiByteToWideChar( CP_ACP, 0, m_FilePath, -1, wFile, MAX_PATH );
// Create the graph builder
CComPtr<IGraphBuilder> pGraphBuilder;
HRESULT hr = ::CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
IID_IGraphBuilder, (void**)&pGraphBuilder);
if (FAILED(hr))
return hr;
ASSERT(pGraphBuilder != NULL);
// Create the "Grabber filter"
CComPtr<IBaseFilter> pGrabberBaseFilter;
CComPtr<ISampleGrabber> pSampleGrabber;
AM_MEDIA_TYPE mt;
hr = ::CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
IID_IBaseFilter, (LPVOID *)&pGrabberBaseFilter);
if (FAILED(hr))
return hr;
pGrabberBaseFilter->QueryInterface(IID_ISampleGrabber, (void**)&pSampleGrabber);
if (pSampleGrabber == NULL)
return E_NOINTERFACE;
hr = pGraphBuilder->AddFilter(pGrabberBaseFilter,L"Grabber");
if (FAILED(hr))
return hr;
ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
mt.majortype = MEDIATYPE_Video;
mt.subtype = MEDIASUBTYPE_RGB24;
mt.formattype = FORMAT_VideoInfo;
hr = pSampleGrabber->SetMediaType(&mt);
if (FAILED(hr))
return hr;
hr = pGraphBuilder->RenderFile(wFile,NULL);
if (FAILED(hr))
return hr;
CComPtr<IMediaControl> pMediaControl;
CComPtr<IMediaEvent> pMediaEventEx;
// QueryInterface for some basic interfaces
pGraphBuilder->QueryInterface(IID_IMediaControl, (void **)&pMediaControl);
pGraphBuilder->QueryInterface(IID_IMediaEvent, (void **)&pMediaEventEx);
if (pMediaControl == NULL || pMediaEventEx == NULL)
return E_NOINTERFACE;
// Set up one-shot mode.
hr = pSampleGrabber->SetBufferSamples(TRUE);
if (FAILED(hr))
return hr;
hr = pSampleGrabber->SetOneShot(TRUE);
if (FAILED(hr))
return hr;
CComQIPtr<IMediaSeeking> pSeek = pMediaControl;
if (pSeek == NULL)
return E_NOINTERFACE;
LONGLONG Duration;
hr = pSeek->GetDuration(&Duration);
if (FAILED(hr))
return hr;
int NumSecs = int(Duration/10000000);
REFERENCE_TIME rtStart = 1 * 10000000;
if (NumSecs < 1)
rtStart = 0;
REFERENCE_TIME rtStop = rtStart;
hr = pSeek->SetPositions(&rtStart, AM_SEEKING_AbsolutePositioning,
&rtStop, AM_SEEKING_AbsolutePositioning);
if (FAILED(hr))
return hr;
CComQIPtr<IVideoWindow> pVideoWindow = pGraphBuilder;
hr = pVideoWindow->put_AutoShow(OAFALSE);
if (FAILED(hr))
return hr;
// Run the graph and wait for completion.
hr = pMediaControl->Run();
if (FAILED(hr))
return hr;
long evCode;
hr = pMediaEventEx->WaitForCompletion(INFINITE, &evCode);
if (FAILED(hr))
return hr;
AM_MEDIA_TYPE MediaType;
ZeroMemory(&MediaType,sizeof(MediaType));
hr = pSampleGrabber->GetConnectedMediaType(&MediaType);
if (FAILED(hr))
return hr;
// Get a pointer to the video header.
VIDEOINFOHEADER *pVideoHeader = (VIDEOINFOHEADER*)MediaType.pbFormat;
if (pVideoHeader == NULL)
return E_FAIL;
// The video header contains the bitmap information.
// Copy it into a BITMAPINFO structure.
BITMAPINFO BitmapInfo;
ZeroMemory(&BitmapInfo, sizeof(BitmapInfo));
CopyMemory(&BitmapInfo.bmiHeader, &(pVideoHeader->bmiHeader), sizeof(BITMAPINFOHEADER));
// Create a DIB from the bitmap header, and get a pointer to the buffer.
void *buffer = NULL;
HBITMAP hBitmap = ::CreateDIBSection(0, &BitmapInfo, DIB_RGB_COLORS, &buffer, NULL, 0);
GdiFlush();
// Copy the image into the buffer.
long size = 0;
hr = pSampleGrabber->GetCurrentBuffer(&size, (long *)buffer);
if (FAILED(hr))
return hr;
long Width = pVideoHeader->bmiHeader.biWidth;
long Height = pVideoHeader->bmiHeader.biHeight;
HBITMAP hOldBitmap = m_Image.SetBitmap(hBitmap);
if (hOldBitmap != NULL)
::DeleteObject(hOldBitmap);
return S_OK;
}
::CreateDIBSection() is not returning NULL either, and buffer gets initialized.
How can this be resolved?

You are requesting data into a zero-length buffer:
long size = 0;
hr = pSampleGrabber->GetCurrentBuffer(&size, (long *)buffer);
The error code looks relevant:
If pBuffer is not NULL, set this parameter equal to the size of the buffer, in bytes.
E_OUTOFMEMORY The specified buffer is not large enough.
You simply need to pass proper arguments in the call in question (a correct buffer length).
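A minimal sketch of the fix, reusing the names from the question: ask the grabber for the required size first (pass NULL for the buffer), check it against the DIB created from the connected format, and only then copy the data. The comparison against biSizeImage assumes, as in the question's code, that the DIB was built from the same bitmap header.
long size = 0;
hr = pSampleGrabber->GetCurrentBuffer(&size, NULL);            // query required size in bytes
if (FAILED(hr))
    return hr;
if ((DWORD)size > BitmapInfo.bmiHeader.biSizeImage)            // DIB too small for the frame
    return E_UNEXPECTED;
hr = pSampleGrabber->GetCurrentBuffer(&size, (long *)buffer);  // size is now non-zero
if (FAILED(hr))
    return hr;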

Related

Failed to get IAMStreamConfig interface

I can capture an image from the webcam and save it as a bitmap by using the Sample Grabber, and I know I can use the IAMStreamConfig interface to GetFormat and SetFormat the video resolution. My question is: I am using FindInterface() to get an IAMStreamConfig*, but it always fails. Is it because I call it in the wrong place, or is it something else I didn't notice? I placed it before RenderStream. Here is some of the code; thanks for your patience and help!
INT USBDeviceApp::GetInterfaces()
{
HRESULT hr;
hr = CoCreateInstance (CLSID_FilterGraph, NULL, CLSCTX_INPROC,
IID_IGraphBuilder, (void **) &pGraphBuilder);
if (FAILED(hr))
return hr;
hr = CoCreateInstance (CLSID_CaptureGraphBuilder2 , NULL, CLSCTX_INPROC,
IID_ICaptureGraphBuilder2, (void **) &pCaptureGraphBuilder2);
if (FAILED(hr))
return hr;
hr = pGraphBuilder->QueryInterface(IID_IMediaControl,(LPVOID *)
&pMediaControl);
if (FAILED(hr))
return hr;
hr = pGraphBuilder->QueryInterface(IID_IVideoWindow, (LPVOID *)
&pVideoWindow);
if(FAILED(hr))
{
return hr;
}
hr = pGraphBuilder->QueryInterface(IID_IMediaEvent,(LPVOID *)
&pMediaEvent);
if(FAILED(hr))
{
return hr;
}
// ------------------------
// Create the Sample Grabber.
hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
IID_IBaseFilter, (void**)&pGrabberF);
if (FAILED(hr))
{
return hr;
}
hr = pGrabberF->QueryInterface(IID_ISampleGrabber,
(void**)&pSampleGrabber);
if(FAILED(hr))
{
AfxMessageBox(_T("Error SampleGrabber QueryInterface"));
}
return 1;
}
INT USBDeviceApp::InitMonikers()
{
HRESULT hr;
ULONG cFetched;
ICreateDevEnum *pCreateDevEnum;
hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void**)&pCreateDevEnum);
if (FAILED(hr))
{
return hr;
}
IEnumMoniker *pEnumMoniker;
hr = pCreateDevEnum->
CreateClassEnumerator(CLSID_VideoInputDeviceCategory,&pEnumMoniker, 0);
if (FAILED(hr) || !pEnumMoniker)
{
return -1;
}
hr = pEnumMoniker->Next(1, &pMonikerVideo, &cFetched);
if (S_OK == hr)
{
hr = pMonikerVideo->BindToObject(0,0,IID_IBaseFilter,
(void**)&pVideoCaptureFilter);
if (FAILED(hr))
{
return hr;
}
}
pEnumMoniker->Release();
return 1;
}
INT USBDeviceApp::CaptureVideo()
{
HRESULT hr = CoInitialize(NULL);
hr = GetInterfaces();
if (FAILED(hr))
{
AfxMessageBox(_T("Failed to get video interfaces!"));
return hr;
}
hr = pCaptureGraphBuilder2->SetFiltergraph(pGraphBuilder);
if (FAILED(hr))
{
AfxMessageBox(_T("Failed to attach the filter graph to the capture graph!"));
return hr;
}
//IAMStreamConfig *pConfig
hr = pCaptureGraphBuilder2->FindInterface(&PIN_CATEGORY_PREVIEW,
&MEDIATYPE_Video,
pVideoCaptureFilter,IID_IAMStreamConfig, (void **)&pConfig);
if (FAILED(hr))
{
AfxMessageBox(_T("Couldn't initialize IAMStreamConfig!"));
}
else
{////
int iCount = 0,iSize = 0;
hr = pConfig->GetNumberOfCapabilities(&iCount,&iSize);
if(iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
{
for(int iFormat = 0;iFormat < iCount;iFormat++)
{
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE *pmtConfig;
hr = pConfig->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
if(hr)
{
if((pmtConfig->majortype == MEDIATYPE_Video) &&
(pmtConfig->subtype == MEDIASUBTYPE_RGB24) &&
(pmtConfig->formattype == FORMAT_VideoInfo) &&
(pmtConfig->cbFormat >= sizeof (VIDEOINFOHEADER)) &&
(pmtConfig->pbFormat != NULL))
{
VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
pVih->bmiHeader.biWidth = 1280;
pVih->bmiHeader.biHeight = 720;
pVih->bmiHeader.biSizeImage = DIBSIZE(pVih->bmiHeader);
hr = pConfig->SetFormat(pmtConfig);
}
DeleteMediaType(pmtConfig);
}
}
}
}////
hr = InitMonikers();
if(FAILED(hr))
{
return hr;
}
hr = pGraphBuilder->AddFilter(pVideoCaptureFilter, L"Video Capture");
if (FAILED(hr))
{
pVideoCaptureFilter->Release();
return hr;
}
AM_MEDIA_TYPE mt;
ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
mt.majortype = MEDIATYPE_Video;
mt.subtype = MEDIASUBTYPE_RGB24;
hr = pSampleGrabber->SetMediaType(&mt);
hr = pSampleGrabber->SetOneShot(FALSE);
hr = pSampleGrabber->SetBufferSamples(TRUE);
hr = pGraphBuilder->AddFilter(pGrabberF, L"Sample Grabber");
if (FAILED(hr))
{
return hr;
}
hr = pCaptureGraphBuilder2->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, pVideoCaptureFilter, pGrabberF, 0 );
if (FAILED(hr))
{
pVideoCaptureFilter->Release();
return hr;
}
hr = pSampleGrabber->GetConnectedMediaType( &mt );
if(FAILED( hr ))
{
return -1;
}
VIDEOINFOHEADER * vih = (VIDEOINFOHEADER*) mt.pbFormat;
pVih = (VIDEOINFOHEADER*) mt.pbFormat;
CSampleGrabberCB *CB = new CSampleGrabberCB() ;
if(!FAILED( hr ))
{
CB->Width = vih->bmiHeader.biWidth;
CB->Height = vih->bmiHeader.biHeight;
FreeMediaType( mt );
}
hr = pSampleGrabber->SetCallback( CB, 1 );
pVideoCaptureFilter->Release();
this->SetUpVideoWindow();
hr = pMediaControl->Run();
if (FAILED(hr))
{
return hr;
}
return hr;
}
hr = pCaptureGraphBuilder2->FindInterface()
always fails to get the IAMStreamConfig interface, and I really don't know why. Can someone help me? Thanks so much!
Your API call below
pCaptureGraphBuilder2->FindInterface(&PIN_CATEGORY_PREVIEW,
&MEDIATYPE_Video,
pVideoCaptureFilter,IID_IAMStreamConfig, (void **) &pConfig);
applies several restrictions to the search, including pin category: you are looking for a preview pin. A capture graph can, however, contain video capture devices that have no dedicated preview pin at all: the preview pin is optional.
You need to take this into account and either relax the search criteria or, better, locate the pin of interest directly on the capture filter. Then you can configure it and connect it to the downstream peer filters. FindInterface is powerful, but it also adds chances for confusion.
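As a rough illustration of that suggestion (a sketch, not tested against your device), you could walk the capture filter's pins directly and ask each output pin for IAMStreamConfig, or keep FindInterface but drop the pin-category restriction. The names pVideoCaptureFilter and pConfig are the ones from the question; make sure the capture filter has actually been created (InitMonikers in your code) before you query it.
IEnumPins *pEnumPins = NULL;
IPin *pPin = NULL;
pConfig = NULL;                                    // the IAMStreamConfig* from the question
hr = pVideoCaptureFilter->EnumPins(&pEnumPins);
if (SUCCEEDED(hr))
{
    while (pConfig == NULL && pEnumPins->Next(1, &pPin, NULL) == S_OK)
    {
        PIN_DIRECTION dir;
        if (SUCCEEDED(pPin->QueryDirection(&dir)) && dir == PINDIR_OUTPUT)
        {
            // The capture output pin of a WDM capture filter typically exposes
            // IAMStreamConfig; pins that do not will simply fail this call.
            pPin->QueryInterface(IID_IAMStreamConfig, (void**)&pConfig);
        }
        pPin->Release();
    }
    pEnumPins->Release();
}
// Or, keeping FindInterface but accepting any pin category:
// hr = pCaptureGraphBuilder2->FindInterface(NULL, &MEDIATYPE_Video,
//          pVideoCaptureFilter, IID_IAMStreamConfig, (void**)&pConfig);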

Capture a frame from video using directshow filters in C++

I have taken code from the net to capture a frame from a video file and modified it to capture all frames and store them as .bmp images.
HRESULT GrabVideoBitmap(PCWSTR pszVideoFile)
{
IGraphBuilder *pGraph = NULL;
IMediaControl *pControl = NULL;
IMediaEventEx *pEvent = NULL;
IBaseFilter *pGrabberF = NULL;
ISampleGrabber *pGrabber = NULL;
IBaseFilter *pSourceF = NULL;
IEnumPins *pEnum = NULL;
IPin *pPin = NULL;
IBaseFilter *pNullF = NULL;
long evCode;
wchar_t temp[10];
wchar_t framename[50] = IMAGE_FILE_PATH; // L"D:\\sampleframe";
BYTE *pBuffer = NULL;
HRESULT hr = CoInitialize(NULL);
if (FAILED(hr))
return 0;
hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
IID_PPV_ARGS(&pGraph));
hr = pGraph->QueryInterface(IID_PPV_ARGS(&pControl));
hr = pGraph->QueryInterface(IID_PPV_ARGS(&pEvent));
// Create the Sample Grabber filter.
hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
IID_PPV_ARGS(&pGrabberF));
hr = pGraph->AddFilter(pGrabberF, L"Sample Grabber");
hr = pGrabberF->QueryInterface(IID_PPV_ARGS(&pGrabber));
// Displays the metadata of the file
DisplayFileInfo((wchar_t*)pszVideoFile); // to display video information
AM_MEDIA_TYPE mt;
ZeroMemory(&mt, sizeof(mt));
mt.majortype = MEDIATYPE_Video;
mt.subtype = MEDIASUBTYPE_RGB24;
hr = pGrabber->SetMediaType(&mt);
hr = pGraph->AddSourceFilter(pszVideoFile, L"Source", &pSourceF);
hr = pSourceF->EnumPins(&pEnum);
while (S_OK == pEnum->Next(1, &pPin, NULL))
{
hr = ConnectFilters(pGraph, pPin, pGrabberF);
SafeRelease(&pPin);
if (SUCCEEDED(hr))
{
break;
}
}
hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER,
IID_PPV_ARGS(&pNullF));
hr = pGraph->AddFilter(pNullF, L"Null Filter");
hr = ConnectFilters(pGraph, pGrabberF, pNullF);
hr = pGrabber->SetOneShot(TRUE);
hr = pGrabber->SetBufferSamples(TRUE);
hr = pControl->Run();
hr = pEvent->WaitForCompletion(INFINITE, &evCode);
for (int i = 0; i < 10; i++)
{
// Find the required buffer size.
long cbBuffer;
hr = pGrabber->GetCurrentBuffer(&cbBuffer, NULL);
pBuffer = (BYTE*)CoTaskMemAlloc(cbBuffer);
hr = pGrabber->GetCurrentBuffer(&cbBuffer, (long*)pBuffer);
hr = pGrabber->GetConnectedMediaType(&mt);
// Examine the format block.
if ((mt.formattype == FORMAT_VideoInfo) &&
(mt.cbFormat >= sizeof(VIDEOINFOHEADER)) &&
(mt.pbFormat != NULL))
{
swprintf(temp, 5, L"%d", i);
wcscat_s(framename, temp);
wcscat_s(framename, L".bmp");
VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)mt.pbFormat;
hr = WriteBitmap((PCWSTR)framename, &pVih->bmiHeader,
mt.cbFormat - SIZE_PREHEADER, pBuffer, cbBuffer);
wcscpy_s(framename, IMAGE_FILE_PATH);
}
else
{
// Invalid format.
hr = VFW_E_INVALIDMEDIATYPE;
}
FreeMediaType(mt);
}
done:
CoTaskMemFree(pBuffer);
SafeRelease(&pPin);
SafeRelease(&pEnum);
SafeRelease(&pNullF);
SafeRelease(&pSourceF);
SafeRelease(&pGrabber);
SafeRelease(&pGrabberF);
SafeRelease(&pControl);
SafeRelease(&pEvent);
SafeRelease(&pGraph);
return hr;
}
The input video file has 132 frames.
But only 68 images are generated.
Also, the last frame of the video is the one captured for the last 38 images.
I think the DirectShow graph is running continuously and WriteBitmap() is missing frames.
How can I get control in DirectShow so that it captures one frame, writes it to a .bmp file, then captures the next frame, and so saves all the frames as .bmp images?
Thanks
Arun
Your approach is wrong: you set the sample grabber to one-shot mode and then wait for graph completion, which only works for capturing a single frame. You need to capture the frames inside an ISampleGrabberCB callback of your pGrabber: implement the ISampleGrabberCB interface and use ISampleGrabber::SetCallback on your pGrabber filter to point it to your implementation. After that you can capture the frames inside either the SampleCB or the BufferCB method. http://www.infognition.com/blog/2013/accessing_raw_video_in_directshow.html
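A rough sketch of that approach, assuming the qedit.h declarations of ISampleGrabberCB are available and reusing the WriteBitmap() helper and IMAGE_FILE_PATH macro from the question:
class FrameWriterCB : public ISampleGrabberCB
{
public:
    BITMAPINFOHEADER bih;    // copy this from the connected VIDEOINFOHEADER
    DWORD cbBih;             // size of the bitmap info, e.g. mt.cbFormat - SIZE_PREHEADER
    long frameIndex;

    FrameWriterCB() : cbBih(0), frameIndex(0) { ZeroMemory(&bih, sizeof(bih)); }

    // IUnknown: the object is kept alive for the lifetime of the graph,
    // so reference counting is stubbed out, as in the SDK samples.
    STDMETHODIMP_(ULONG) AddRef()  { return 2; }
    STDMETHODIMP_(ULONG) Release() { return 1; }
    STDMETHODIMP QueryInterface(REFIID riid, void **ppv)
    {
        if (riid == IID_IUnknown || riid == IID_ISampleGrabberCB)
        {
            *ppv = static_cast<ISampleGrabberCB*>(this);
            return S_OK;
        }
        *ppv = NULL;
        return E_NOINTERFACE;
    }

    // Called once per frame when SetCallback(..., 1) selects BufferCB.
    STDMETHODIMP BufferCB(double /*SampleTime*/, BYTE *pBuffer, long BufferLen)
    {
        wchar_t name[MAX_PATH];
        swprintf_s(name, L"%s%ld.bmp", IMAGE_FILE_PATH, frameIndex++);
        return WriteBitmap(name, &bih, cbBih, pBuffer, BufferLen);
    }

    // Not used when BufferCB is selected.
    STDMETHODIMP SampleCB(double /*SampleTime*/, IMediaSample* /*pSample*/) { return S_OK; }
};
Then, instead of SetOneShot(TRUE) and the GetCurrentBuffer loop, you would fill in bih and cbBih from GetConnectedMediaType and call pGrabber->SetCallback(&cb, 1) before running the graph; every frame is delivered to BufferCB while the graph plays.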

cannot readsample from IMFSource in synchronous mode

I am having trouble with a video recording application that I am writing using Microsoft Media Foundation.
Specifically, the read/write function (which I put in a loop that lives on its own thread) doesn't make it past the call to ReadSample:
HRESULT WinCapture::rwFunction(void) {
HRESULT hr;
DWORD streamIndex, flags;
LONGLONG llTimeStamp;
IMFSample *pSample = NULL;
EnterCriticalSection(&m_critsec);
// Read another sample.
hr = m_pReader->ReadSample(
(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
0,
&streamIndex, // actual
&flags,//NULL, // flags
&llTimeStamp,//NULL, // timestamp
&pSample // sample
);
if (FAILED(hr)) { goto done; }
hr = m_pWriter->WriteSample(0, pSample);
goto done;
done:
return hr;
SafeRelease(&pSample);
LeaveCriticalSection(&m_critsec);
}
The value of hr is the error code 0xC00D3704, so the code snippet skips the call to WriteSample.
It is a lot of steps, but I am fairly certain that I am setting up m_pReader (an IMFSourceReader *) correctly.
HRESULT WinCapture::OpenMediaSource(IMFMediaSource *pSource)
{
HRESULT hr = S_OK;
IMFAttributes *pAttributes = NULL;
hr = MFCreateAttributes(&pAttributes, 2);
// use a callback
//if (SUCCEEDED(hr))
//{
// hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this);
//}
// set the desired format type
DWORD dwFormatIndex = (DWORD)formatIdx;
IMFPresentationDescriptor *pPD = NULL;
IMFStreamDescriptor *pSD = NULL;
IMFMediaTypeHandler *pHandler = NULL;
IMFMediaType *pType = NULL;
// create the source reader
if (SUCCEEDED(hr))
{
hr = MFCreateSourceReaderFromMediaSource(
pSource,
pAttributes,
&m_pReader
);
}
// steps to set the selected format type
hr = pSource->CreatePresentationDescriptor(&pPD);
if (FAILED(hr))
{
goto done;
}
BOOL fSelected;
hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pSD);
if (FAILED(hr))
{
goto done;
}
hr = pSD->GetMediaTypeHandler(&pHandler);
if (FAILED(hr))
{
goto done;
}
hr = pHandler->GetMediaTypeByIndex(dwFormatIndex, &pType);
if (FAILED(hr))
{
goto done;
}
hr = pHandler->SetCurrentMediaType(pType);
{
goto done;
}
hr = m_pReader->SetCurrentMediaType(
(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
NULL,
pType
);
// set to maximum framerate?
hr = pHandler->GetCurrentMediaType(&pType);
if (FAILED(hr))
{
goto done;
}
// Get the maximum frame rate for the selected capture format.
// Note: To get the minimum frame rate, use the
// MF_MT_FRAME_RATE_RANGE_MIN attribute instead.
PROPVARIANT var;
if (SUCCEEDED(pType->GetItem(MF_MT_FRAME_RATE_RANGE_MAX, &var)))
{
hr = pType->SetItem(MF_MT_FRAME_RATE, var);
PropVariantClear(&var);
if (FAILED(hr))
{
goto done;
}
hr = pHandler->SetCurrentMediaType(pType);
{
goto done;
}
hr = m_pReader->SetCurrentMediaType(
(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
NULL,
pType
);
}
goto done;
done:
SafeRelease(&pPD);
SafeRelease(&pSD);
SafeRelease(&pHandler);
SafeRelease(&pType);
SafeRelease(&pAttributes);
return hr;
}
This code is all copied from Microsoft documentation pages and the SDK sample code. The variable formatIdx is 0; I get it by enumerating the camera formats and choosing the first one.
UPDATE
I have rewritten this program so that it uses callbacks instead of a blocking read/write function and I have exactly the same issue.
Here I get the device and initiate the callback method:
HRESULT WinCapture::initCapture(const WCHAR *pwszFileName, IMFMediaSource *pSource) {
HRESULT hr = S_OK;
EncodingParameters params;
params.subtype = MFVideoFormat_WMV3; // TODO, paramterize this
params.bitrate = TARGET_BIT_RATE;
m_llBaseTime = 0;
IMFMediaType *pType = NULL;
DWORD sink_stream = 0;
EnterCriticalSection(&m_critsec);
hr = m_ppDevices[selectedDevice]->ActivateObject(IID_PPV_ARGS(&pSource));
//m_bIsCapturing = false; // this is set externally here
if (SUCCEEDED(hr))
hr = OpenMediaSource(pSource); // also creates the reader
if (SUCCEEDED(hr))
{
hr = m_pReader->GetCurrentMediaType(
(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
&pType
);
}
// Create the sink writer
if (SUCCEEDED(hr))
{
hr = MFCreateSinkWriterFromURL(
pwszFileName,
NULL,
NULL,
&m_pWriter
);
}
if (SUCCEEDED(hr))
hr = ConfigureEncoder(params, pType, m_pWriter, &sink_stream);
// kick off the recording
if (SUCCEEDED(hr))
{
m_llBaseTime = 0;
m_bIsCapturing = TRUE;
hr = m_pReader->ReadSample(
(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
0,
NULL,
NULL,
NULL,
NULL
);
}
SafeRelease(&pType);
SafeRelease(&pSource);
pType = NULL;
LeaveCriticalSection(&m_critsec);
return hr;
}
The OpenMediaSource method is here:
HRESULT WinCapture::OpenMediaSource(IMFMediaSource *pSource)
{
HRESULT hr = S_OK;
IMFAttributes *pAttributes = NULL;
hr = MFCreateAttributes(&pAttributes, 2);
// use a callback
if (SUCCEEDED(hr))
{
hr = pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this);
}
// set the desired format type
DWORD dwFormatIndex = (DWORD)formatIdx;
IMFPresentationDescriptor *pPD = NULL;
IMFStreamDescriptor *pSD = NULL;
IMFMediaTypeHandler *pHandler = NULL;
IMFMediaType *pType = NULL;
// create the source reader
if (SUCCEEDED(hr))
{
hr = MFCreateSourceReaderFromMediaSource(
pSource,
pAttributes,
&m_pReader
);
}
// steps to set the selected format type
if (SUCCEEDED(hr)) hr = pSource->CreatePresentationDescriptor(&pPD);
if (FAILED(hr))
{
goto done;
}
BOOL fSelected;
hr = pPD->GetStreamDescriptorByIndex(0, &fSelected, &pSD);
if (FAILED(hr))
{
goto done;
}
hr = pSD->GetMediaTypeHandler(&pHandler);
if (FAILED(hr))
{
goto done;
}
hr = pHandler->GetMediaTypeByIndex(dwFormatIndex, &pType);
if (FAILED(hr))
{
goto done;
}
hr = pHandler->SetCurrentMediaType(pType);
if (FAILED(hr))
{
goto done;
}
// get available framerates
hr = MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, &frameRate, &denominator);
std::cout << "frameRate " << frameRate << " denominator " << denominator << std::endl;
hr = m_pReader->SetCurrentMediaType(
(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
NULL,
pType
);
// set to maximum framerate?
hr = pHandler->GetCurrentMediaType(&pType);
if (FAILED(hr))
{
goto done;
}
goto done;
done:
SafeRelease(&pPD);
SafeRelease(&pSD);
SafeRelease(&pHandler);
SafeRelease(&pType);
SafeRelease(&pAttributes);
return hr;
}
Here, formatIdx is a field of this class that gets set by the user via the GUI; I leave it at 0 for testing. So I don't think I am missing any steps to get the camera going, but maybe I am.
When I inspect which applications are using the webcam (using this method) after the call to ActivateObject, I see that my application is using the webcam as expected. But when I enter the callback routine, I see two instances of my application using the webcam. The same happens with the blocking method.
I don't know if that is good or bad, but when I enter my callback method:
HRESULT WinCapture::OnReadSample(
HRESULT hrStatus,
DWORD /*dwStreamIndex*/,
DWORD /*dwStreamFlags*/,
LONGLONG llTimeStamp,
IMFSample *pSample // Can be NULL
)
{
EnterCriticalSection(&m_critsec);
if (!IsCapturing() || m_bIsCapturing == false)
{
LeaveCriticalSection(&m_critsec);
return S_OK;
}
HRESULT hr = S_OK;
if (FAILED(hrStatus))
{
hr = hrStatus;
goto done;
}
if (pSample)
{
if (m_bFirstSample)
{
m_llBaseTime = llTimeStamp;
m_bFirstSample = FALSE;
}
// rebase the time stamp
llTimeStamp -= m_llBaseTime;
hr = pSample->SetSampleTime(llTimeStamp);
if (FAILED(hr)) { goto done; }
hr = m_pWriter->WriteSample(0, pSample);
if (FAILED(hr)) { goto done; }
}
// Read another sample.
hr = m_pReader->ReadSample(
(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
0,
NULL, // actual
NULL, // flags
NULL, // timestamp
NULL // sample
);
done:
if (FAILED(hr))
{
//NotifyError(hr);
}
LeaveCriticalSection(&m_critsec);
return hr;
}
hrStatus is the 0xC00D3704 error I was getting before, and the callback goes straight to done, thus killing the callback chain.
Finally, I should say that I am modeling (read: 'copying') my code on the MFCaptureToFile example in the Windows SDK samples, and that doesn't work either, although there I get a weird negative number for the failed HRESULT: -1072875772 (which is the same 0xC00D3704 interpreted as a signed integer).
If you get the error 0xC00D3704, it means the source is not initialized. Such an error can be caused by a mistake during initialization, by the camera being busy in another application (process), or by the camera not being supported by the UVC driver. (Old cameras use a DirectShow driver that is only partly compatible with UVC: it is possible to read some general information from them via UVC, such as the friendly name and symbolic link, but old cameras follow the DirectShow PUSH model, in which the camera pushes bytes into the pipeline, whereas Media Foundation PULLs data: it sends a request and waits for the data.)
To check your code, I would advise researching the articles about capturing video from a webcam on CodeProject: search for the name "videoInput".
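For reference, a minimal sketch of one synchronous read/write iteration (assuming the reader was created without the MF_SOURCE_READER_ASYNC_CALLBACK attribute and the sink writer uses stream index 0, as in the question), including the stream-flag checks that make end-of-stream and gaps visible instead of failing silently:
HRESULT ReadAndWriteOneSample(IMFSourceReader *pReader, IMFSinkWriter *pWriter)
{
    DWORD streamIndex = 0, flags = 0;
    LONGLONG llTimeStamp = 0;
    IMFSample *pSample = NULL;

    HRESULT hr = pReader->ReadSample(
        (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
        0, &streamIndex, &flags, &llTimeStamp, &pSample);
    if (FAILED(hr))
        return hr;                                // e.g. the 0xC00D3704 from the question

    if (flags & MF_SOURCE_READERF_ENDOFSTREAM)
    {
        SafeRelease(&pSample);
        return S_FALSE;                           // no more data
    }
    if (pSample != NULL)                          // stream gaps deliver a NULL sample
        hr = pWriter->WriteSample(0, pSample);

    SafeRelease(&pSample);
    return hr;
}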

How to write an avi file with DirectShow

I'm trying to write an AVI video file from the stream of my DeckLink SDI card. After looking on the internet and here, I wrote this code, but my problem is that the program doesn't write the file. Here's the code:
#include <Windows.h>
#include <DShow.h>
#include <iostream>
int main(void)
{
IGraphBuilder* pGraph = NULL;
ICaptureGraphBuilder2* pBuild = NULL;
IMediaControl* pControl = NULL;
ICreateDevEnum* pDevEnum = NULL;
IEnumMoniker* pEnum = NULL;
IMoniker *pMoniker = NULL;
IMoniker* goodMoniker = NULL;
IBaseFilter* pCap;
HRESULT hr;
//Initialize pGraph
hr = CoInitialize(NULL);
hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void **) &pGraph);
if(FAILED(hr))
{
printf("ERROR - Could not initialize COM library");
return 1;
}
//Initialize pBuild
hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void**) &pBuild);
if(FAILED(hr))
{
printf("ERROR - Could not create the Filter Graph Manager.");
return 2;
}
pBuild->SetFiltergraph(pGraph);
hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, reinterpret_cast<void**>(&pDevEnum));
if(SUCCEEDED(hr))
{
hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0);
}
HWND hList;
while(pEnum->Next(1, &pMoniker, NULL) == S_OK)
{
IPropertyBag* pPropBag;
hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)(&pPropBag));
if(FAILED(hr))
{
pMoniker->Release();
continue;
}
VARIANT varName;
VariantInit(&varName);
hr = pPropBag->Read(L"Description", &varName, 0);
if(FAILED(hr))
{
hr = pPropBag->Read(L"FriendlyName", &varName, 0);
}
if(SUCCEEDED(hr))
{
for(int i=0;i<20;i++)
{
std::cout<<(char)*(varName.bstrVal + i);
}
char yn;
std::cin>>yn;
if(yn=='Y')
{
std::cout<<"SUCCESSFUL"<<std::endl;
goodMoniker = pMoniker;
VariantClear(&varName);
}
}
pPropBag->Release();
if(pMoniker != goodMoniker)
{
pMoniker->Release();
}
}
hr = goodMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)(&pCap));
if(SUCCEEDED(hr))
{
hr = pGraph->AddFilter(pCap, L"Capture Filter");
}
IBaseFilter *pMux = NULL;
// hr = pBuild->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, pCap, NULL, NULL);
hr = pBuild->SetOutputFileName(
&MEDIASUBTYPE_Avi, // Specifies AVI for the target file.
L"C:\\stuff.avi", // File name.
&pCap, // Receives a pointer to the mux.
NULL); // (Optional) Receives a pointer to the file sink.
hr =pBuild->RenderStream(
&PIN_CATEGORY_CAPTURE, // Pin category.
&MEDIATYPE_Video, // Media type.
pCap, // Capture filter.
NULL, // Intermediate filter (optional).
pMux); // Mux or file sink filter.
hr = pCap->QueryInterface(IID_IMediaControl,(void**) pControl);
printf("START ");
hr = pControl->Run();
Sleep(100000);
hr = pControl->Stop();
CoUninitialize();
pGraph->Release();
pBuild->Release();
pCap->Release();
}
Here is the error message that I get when I try to debug it:
Unhandled exception at 0x776015de in STREAMMMMM.exe: 0xC0000005: Access violation reading location 0x00000000.
The program crashes on this line:
hr = pCap->QueryInterface(IID_IMediaControl,(void**) pControl);
There are tens, if not hundreds, of examples of how to write to AVI with DirectShow on the Internet.
This particular code snippet does not even start writing. It only prepares the pipeline, and you never actually get as far as IMediaControl::Run, which is what you are supposed to do. Further on, you should wait until you have written enough, then stop recording, and only then release the interface pointers.
Have a look at this question: Using a DirectShow filter without registering it, via a private CoCreateInstance, for what you are missing to start the actual capture and writing (IMediaControl::Run and Sleep in particular).
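For completeness, a minimal sketch of that control sequence, reusing the names from the question. Note that IMediaControl is implemented by the filter graph manager, not by the capture filter, and QueryInterface needs the address of the pointer; both are wrong in the crashing line.
hr = pGraph->QueryInterface(IID_IMediaControl, (void**)&pControl);
if (SUCCEEDED(hr))
{
    hr = pControl->Run();        // start capturing and writing
    Sleep(10000);                // record for roughly ten seconds (or wait on graph events)
    pControl->Stop();            // stop the graph before tearing it down
    pControl->Release();
}
// Only after stopping should pGraph, pBuild, pCap and the other interfaces be released.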

DirectShow BindToObject returning invalid handle

EDIT
Just tested playcap.cpp and I'm also getting the same error, so I know it's not a fault in my code.
--
EDIT 2
Edited my code to preserve the goodMoniker pointer. Same error, however.
+if(pMoniker != goodMoniker)
+{
pMoniker->Release();
+}
--
Having a problem with getting my webcam to work with DirectShow. This line:
hr = goodMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)(&pCap));
returns the following error from MSVC++ 2010 EE:
First-chance exception at 0x777ff9d2 in WebcamControlTest.exe: 0xC0000008: An invalid handle was specified.
Full code here (ripped almost completely from MSDN):
#include <DShow.h>
#include <iostream>
int main(void)
{
IGraphBuilder* pGraph = NULL;
ICaptureGraphBuilder2* pBuild = NULL;
HRESULT hr;
//Initialize pBuild
hr = CoInitialize(NULL);
hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void**) &pBuild);
if(FAILED(hr))
{
printf("ERROR - Could not initialize COM library");
return 1;
}
//Initialize pGraph
hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER, IID_IGraphBuilder, (void **) &pGraph);
if(FAILED(hr))
{
printf("ERROR - Could not create the Filter Graph Manager.");
return 2;
}
pBuild->SetFiltergraph(pGraph);
//Initialize pCap
ICreateDevEnum* pDevEnum = NULL;
IEnumMoniker* pEnum = NULL;
hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, reinterpret_cast<void**>(&pDevEnum));
if(SUCCEEDED(hr))
{
hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0);
}
IMoniker* goodMoniker = NULL;
HWND hList;
IMoniker *pMoniker = NULL;
while(pEnum->Next(1, &pMoniker, NULL) == S_OK)
{
IPropertyBag* pPropBag;
hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)(&pPropBag));
if(FAILED(hr))
{
pMoniker->Release();
continue;
}
VARIANT varName;
VariantInit(&varName);
hr = pPropBag->Read(L"Description", &varName, 0);
if(FAILED(hr))
{
hr = pPropBag->Read(L"FriendlyName", &varName, 0);
}
if(SUCCEEDED(hr))
{
for(int i=0;i<8;i++)
{
std::cout<<(char)*(varName.bstrVal + i);
}
char yn;
std::cin>>yn;
if(yn=='Y')
{
std::cout<<"SUCCESSFUL"<<std::endl;
goodMoniker = pMoniker;
VariantClear(&varName);
}
}
pPropBag->Release();
if(pMoniker != goodMoniker)
{
pMoniker->Release();
}
}
IBaseFilter* pCap;
hr = goodMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)(&pCap));
if(SUCCEEDED(hr))
{
hr = pGraph->AddFilter(pCap, L"Capture Filter");
}
hr = pBuild->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, pCap, NULL, NULL);
while(SUCCEEDED(hr));
pGraph->Release();
pBuild->Release();
pCap->Release();
}
It might be a driver problem, as there is one device that works (a virtual driver for screen capture, not actual webcam input), but I've updated, uninstalled, and reinstalled with no luck. Any ideas?
After saving a pointer in goodMoniker, you release the object a couple of lines below (pMoniker->Release()). Now goodMoniker points to a released object; you should have increased its reference count.
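A minimal sketch of that fix inside the selection loop, using the names from the question:
if (yn == 'Y')
{
    std::cout << "SUCCESSFUL" << std::endl;
    goodMoniker = pMoniker;
    goodMoniker->AddRef();   // own an extra reference so the later pMoniker->Release() is safe
    VariantClear(&varName);
}
// ...and once goodMoniker is no longer needed (after BindToObject):
// goodMoniker->Release();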