Serialize: storing a CImage in a CArchive - c++

I am currently trying to figure out how to properly store a CImage (JPEG) within a CArchive. My current approach is the following (pseudo) code:
BOOL CPicture::Serialize(CArchive &ar)
{
    IStream *pStream = NULL;
    HRESULT hr;
    CImage *img = GetImage();
    if (ar.IsLoading())
    {
        HGLOBAL hMem = GlobalAlloc(GMEM_FIXED, 54262);
        hr = CreateStreamOnHGlobal(hMem, FALSE, &pStream);
        if (SUCCEEDED(hr))
        {
            ar.Read(pStream, 54262);
            img->Load(pStream);
            pStream->Release();
            GlobalUnlock(hMem);
            GlobalFree(hMem);
        }
    }
    else
    {
        hr = CreateStreamOnHGlobal(0, TRUE, &pStream);
        if (SUCCEEDED(hr))
        {
            hr = img->Save(pStream, Gdiplus::ImageFormatJPEG);
            if (SUCCEEDED(hr))
                ar.Write(pStream, 54262);
        }
    }
    ...
I am just now getting back into C++ and have only done a little with it in the past. Any help would be very much appreciated.
Thank you very much in advance.

I'm not an expert on IStream, but I think you may not be using it correctly. The following code seems to work. It currently archives in PNG format, but that is easy to change by passing a different format GUID (for example Gdiplus::ImageFormatJPEG) in place of Gdiplus::ImageFormatPNG.
It owes a lot to the tutorial "Embracing IStream as just a stream of bytes" by S. Arman on CodeProject.
void ImageArchive(CImage *pImage, CArchive &ar)
{
    HRESULT hr;
    if (ar.IsStoring())
    {
        // create a stream
        IStream *pStream = SHCreateMemStream(NULL, 0);
        ASSERT(pStream != NULL);
        if (pStream == NULL)
            return;

        // write the image to the stream rather than to a file (the stream in this case is just a chunk
        // of memory automatically allocated by the stream itself)
        pImage->Save(pStream, Gdiplus::ImageFormatPNG); // Note: the IStream will automatically grow as necessary.

        // find the size of memory written (i.e. the image file size)
        STATSTG statsg;
        hr = pStream->Stat(&statsg, STATFLAG_NONAME);
        ASSERT(hr == S_OK);
        ASSERT(statsg.cbSize.QuadPart < ULONG_MAX);
        ULONG nImgBytes = ULONG(statsg.cbSize.QuadPart); // any image that can be displayed had better not have more than ULONG_MAX bytes

        // go to the start of the stream
        LARGE_INTEGER seekPos;
        seekPos.QuadPart = 0;
        hr = pStream->Seek(seekPos, STREAM_SEEK_SET, NULL);
        ASSERT(hr == S_OK);

        // get the data in the stream into a standard byte array
        char *bytes = new char[nImgBytes];
        ULONG nRead;
        hr = pStream->Read(bytes, nImgBytes, &nRead); // read the data from the stream into normal memory. nRead should equal statsg.cbSize.QuadPart.
        ASSERT(hr == S_OK);
        ASSERT(nImgBytes == nRead);

        // write the data to the archive and finish
        ar << nRead;            // save the size of the image data, since we will need to read this amount later
        ar.Write(bytes, nRead); // write the data to the archive file from the stream memory
        pStream->Release();
        delete[] bytes;
    }
    else
    {
        // get the data from the archive
        ULONG nBytes;
        ar >> nBytes;
        char *bytes = new char[nBytes];           // ordinary memory to hold the data from the archive file
        UINT nBytesRead = ar.Read(bytes, nBytes); // read the data from the archive file
        ASSERT(nBytesRead == UINT(nBytes));

        // make the stream
        IStream *pStream = SHCreateMemStream(NULL, 0);
        ASSERT(pStream != NULL);
        if (pStream == NULL)
            return;

        // put the archive data into the stream
        ULONG nBytesWritten;
        pStream->Write(bytes, nBytes, &nBytesWritten);
        ASSERT(nBytes == nBytesWritten);
        if (nBytes != nBytesWritten)
            return;

        // go to the start of the stream
        LARGE_INTEGER seekPos;
        seekPos.QuadPart = 0;
        hr = pStream->Seek(seekPos, STREAM_SEEK_SET, NULL);
        ASSERT(hr == S_OK);

        // load the stream into the CImage and finish
        pImage->Load(pStream); // pass the archive data to CImage
        pStream->Release();
        delete[] bytes;
    }
}
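For completeness, here is a sketch of how the helper above could be wired into the Serialize override from the question (CPicture, GetImage(), and the BOOL return are taken from the question; error handling is omitted):
BOOL CPicture::Serialize(CArchive &ar)
{
    // Delegate both the storing and the loading path to ImageArchive()
    CImage *img = GetImage();
    if (img == NULL)
        return FALSE;
    ImageArchive(img, ar);
    return TRUE;
}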

Related

DirectSound API explanation

As a college project we have to develop a server-client music streaming application using the DirectSound API. However, due to the lack of information, guides, or tutorials online, the only source I can gather info from is the piece of code below, which was the only thing provided by the lecturer. Can anyone help me understand the general purpose of these functions and the order in which they should be implemented?
Thanks in advance.
IDirectSound8 * directSound = nullptr;
IDirectSoundBuffer * primaryBuffer = nullptr;
IDirectSoundBuffer8 * secondaryBuffer = nullptr;
BYTE * dataBuffer = nullptr;
DWORD dataBufferSize;
DWORD averageBytesPerSecond;
// Search the file for the chunk we want
// Returns the size of the chunk and its location in the file
HRESULT FindChunk(HANDLE fileHandle, FOURCC fourcc, DWORD & chunkSize, DWORD & chunkDataPosition)
{
HRESULT hr = S_OK;
DWORD chunkType;
DWORD chunkDataSize;
DWORD riffDataSize = 0;
DWORD fileType;
DWORD bytesRead = 0;
DWORD offset = 0;
if (SetFilePointer(fileHandle, 0, NULL, FILE_BEGIN) == INVALID_SET_FILE_POINTER)
{
return HRESULT_FROM_WIN32(GetLastError());
}
while (hr == S_OK)
{
if (ReadFile(fileHandle, &chunkType, sizeof(DWORD), &bytesRead, NULL) == 0)
{
hr = HRESULT_FROM_WIN32(GetLastError());
}
if (ReadFile(fileHandle, &chunkDataSize, sizeof(DWORD), &bytesRead, NULL) == 0)
{
hr = HRESULT_FROM_WIN32(GetLastError());
}
switch (chunkType)
{
case fourccRIFF:
riffDataSize = chunkDataSize;
chunkDataSize = 4;
if (ReadFile(fileHandle, &fileType, sizeof(DWORD), &bytesRead, NULL) == 0)
{
hr = HRESULT_FROM_WIN32(GetLastError());
}
break;
default:
if (SetFilePointer(fileHandle, chunkDataSize, NULL, FILE_CURRENT) == INVALID_SET_FILE_POINTER)
{
return HRESULT_FROM_WIN32(GetLastError());
}
}
offset += sizeof(DWORD) * 2;
if (chunkType == fourcc)
{
chunkSize = chunkDataSize;
chunkDataPosition = offset;
return S_OK;
}
offset += chunkDataSize;
if (bytesRead >= riffDataSize)
{
return S_FALSE;
}
}
return S_OK;
}
// Read a chunk of data of the specified size from the file at the specified location into the
// supplied buffer
HRESULT ReadChunkData(HANDLE fileHandle, void * buffer, DWORD buffersize, DWORD bufferoffset)
{
HRESULT hr = S_OK;
DWORD bytesRead;
if (SetFilePointer(fileHandle, bufferoffset, NULL, FILE_BEGIN) == INVALID_SET_FILE_POINTER)
{
return HRESULT_FROM_WIN32(GetLastError());
}
if (ReadFile(fileHandle, buffer, buffersize, &bytesRead, NULL) == 0)
{
hr = HRESULT_FROM_WIN32(GetLastError());
}
return hr;
}
bool Initialise()
{
HRESULT result;
DSBUFFERDESC bufferDesc;
WAVEFORMATEX waveFormat;
// Initialize the direct sound interface pointer for the default sound device.
result = DirectSoundCreate8(NULL, &directSound, NULL);
if (FAILED(result))
{
return false;
}
// Set the cooperative level to priority so the format of the primary sound buffer can be modified.
// We use the handle of the desktop window since we are a console application. If you do write a
// graphical application, you should use the HWnd of the graphical application.
result = directSound->SetCooperativeLevel(GetDesktopWindow(), DSSCL_PRIORITY);
if (FAILED(result))
{
return false;
}
// Setup the primary buffer description.
bufferDesc.dwSize = sizeof(DSBUFFERDESC);
bufferDesc.dwFlags = DSBCAPS_PRIMARYBUFFER | DSBCAPS_CTRLVOLUME;
bufferDesc.dwBufferBytes = 0;
bufferDesc.dwReserved = 0;
bufferDesc.lpwfxFormat = NULL;
bufferDesc.guid3DAlgorithm = GUID_NULL;
// Get control of the primary sound buffer on the default sound device.
result = directSound->CreateSoundBuffer(&bufferDesc, &primaryBuffer, NULL);
if (FAILED(result))
{
return false;
}
// Setup the format of the primary sound buffer.
// In this case it is a .WAV file recorded at 44,100 samples per second in 16-bit stereo (CD audio
// format).
// Really, we should set this up from the wave file format loaded from the file.
waveFormat.wFormatTag = WAVE_FORMAT_PCM;
waveFormat.nSamplesPerSec = 44100;
waveFormat.wBitsPerSample = 16;
waveFormat.nChannels = 2;
waveFormat.nBlockAlign = (waveFormat.wBitsPerSample / 8) * waveFormat.nChannels;
waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
waveFormat.cbSize = 0;
// Set the primary buffer to be the wave format specified.
result = primaryBuffer->SetFormat(&waveFormat);
if (FAILED(result))
{
return false;
}
return true;
}
void Shutdown()
{
// Destroy the data buffer
if (dataBuffer != nullptr)
{
delete[] dataBuffer;
dataBuffer = nullptr;
}
// Release the primary sound buffer pointer.
if (primaryBuffer != nullptr)
{
primaryBuffer->Release();
primaryBuffer = nullptr;
}
// Release the direct sound interface pointer.
if (directSound != nullptr)
{
directSound->Release();
directSound = nullptr;
}
}
// Load the wave file into memory and setup the secondary buffer.
bool LoadWaveFile(TCHAR * filename)
{
WAVEFORMATEXTENSIBLE wfx = { 0 };
WAVEFORMATEX waveFormat;
DSBUFFERDESC bufferDesc;
HRESULT result;
IDirectSoundBuffer * tempBuffer;
DWORD chunkSize;
DWORD chunkPosition;
DWORD filetype;
HRESULT hr = S_OK;
// Open the wave file
HANDLE fileHandle = CreateFile(filename, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, 0,
NULL);
if (fileHandle == INVALID_HANDLE_VALUE)
{
return false;
}
if (SetFilePointer(fileHandle, 0, NULL, FILE_BEGIN) == INVALID_SET_FILE_POINTER)
{
return false;
}
// Make sure we have a RIFF wave file
FindChunk(fileHandle, fourccRIFF, chunkSize, chunkPosition);
ReadChunkData(fileHandle, &filetype, sizeof(DWORD), chunkPosition);
if (filetype != fourccWAVE)
{
return false;
}
// Locate the 'fmt ' chunk, and copy its contents into a WAVEFORMATEXTENSIBLE structure.
FindChunk(fileHandle, fourccFMT, chunkSize, chunkPosition);
ReadChunkData(fileHandle, &wfx, chunkSize, chunkPosition);
// Find the audio data chunk
FindChunk(fileHandle, fourccDATA, chunkSize, chunkPosition);
dataBufferSize = chunkSize;
// Read the audio data from the 'data' chunk. This is the data that needs to be copied into
// the secondary buffer for playing
dataBuffer = new BYTE[dataBufferSize];
ReadChunkData(fileHandle, dataBuffer, dataBufferSize, chunkPosition);
CloseHandle(fileHandle);
// Set the wave format of the secondary buffer that this wave file will be loaded onto.
// The value of wfx.Format.nAvgBytesPerSec will be very useful to you since it gives you
// an approximate value for how many bytes it takes to hold one second of audio data.
waveFormat.wFormatTag = wfx.Format.wFormatTag;
waveFormat.nSamplesPerSec = wfx.Format.nSamplesPerSec;
waveFormat.wBitsPerSample = wfx.Format.wBitsPerSample;
waveFormat.nChannels = wfx.Format.nChannels;
waveFormat.nBlockAlign = wfx.Format.nBlockAlign;
waveFormat.nAvgBytesPerSec = wfx.Format.nAvgBytesPerSec;
waveFormat.cbSize = 0;
// Set the buffer description of the secondary sound buffer that the wave file will be loaded onto.
// In this example, we setup a buffer the same size as that of the audio data. For the assignment,
// your secondary buffer should only be large enough to hold approximately four seconds of data.
bufferDesc.dwSize = sizeof(DSBUFFERDESC);
bufferDesc.dwFlags = DSBCAPS_CTRLVOLUME | DSBCAPS_GLOBALFOCUS | DSBCAPS_CTRLPOSITIONNOTIFY;
bufferDesc.dwBufferBytes = dataBufferSize;
bufferDesc.dwReserved = 0;
bufferDesc.lpwfxFormat = &waveFormat;
bufferDesc.guid3DAlgorithm = GUID_NULL;
// Create a temporary sound buffer with the specific buffer settings.
result = directSound->CreateSoundBuffer(&bufferDesc, &tempBuffer, NULL);
if (FAILED(result))
{
return false;
}
// Test the buffer format against the direct sound 8 interface and create the secondary buffer.
result = tempBuffer->QueryInterface(IID_IDirectSoundBuffer8, (void**)&secondaryBuffer);
if (FAILED(result))
{
return false;
}
// Release the temporary buffer.
tempBuffer->Release();
tempBuffer = nullptr;
return true;
}
void ReleaseSecondaryBuffer()
{
// Release the secondary sound buffer.
if (secondaryBuffer != nullptr)
{
(secondaryBuffer)->Release();
secondaryBuffer = nullptr;
}
}
bool PlayWaveFile()
{
HRESULT result;
unsigned char * bufferPtr1;
unsigned long bufferSize1;
unsigned char * bufferPtr2;
unsigned long bufferSize2;
BYTE * dataBufferPtr = dataBuffer;
DWORD soundBytesOutput = 0;
bool fillFirstHalf = true;
LPDIRECTSOUNDNOTIFY8 directSoundNotify;
DSBPOSITIONNOTIFY positionNotify[2];
// Set position of playback at the beginning of the sound buffer.
result = secondaryBuffer->SetCurrentPosition(0);
if (FAILED(result))
{
return false;
}
// Set volume of the buffer to 100%.
result = secondaryBuffer->SetVolume(DSBVOLUME_MAX);
if (FAILED(result))
{
return false;
}
// Create an event for notification that playing has stopped. This is only useful
// when your audio file fits in the entire secondary buffer (as in this example).
// For the assignment, you are going to need notifications when the playback has reached the
// first quarter of the buffer or the third quarter of the buffer so that you know when
// you should copy more data into the secondary buffer.
HANDLE playEventHandles[1];
playEventHandles[0] = CreateEvent(NULL, FALSE, FALSE, NULL);
result = secondaryBuffer->QueryInterface(IID_IDirectSoundNotify8, (LPVOID*)&directSoundNotify);
if (FAILED(result))
{
return false;
}
// This notification is used to indicate that we have finished playing the buffer of audio. In
// the assignment, you will need two different notifications as mentioned above.
positionNotify[0].dwOffset = DSBPN_OFFSETSTOP;
positionNotify[0].hEventNotify = playEventHandles[0];
directSoundNotify->SetNotificationPositions(1, positionNotify);
directSoundNotify->Release();
// Now we can fill our secondary buffer and play it. In the assignment, you will not be able to fill
// the buffer all at once since the secondary buffer will not be large enough. Instead, you will need to
// loop through the data that you have retrieved from the server, filling different sections of the
// secondary buffer as you receive notifications.
// Lock the first part of the secondary buffer to write wave data into it. In this case, we lock the entire
// buffer, but for the assignment, you will only want to lock the half of the buffer that is not being played.
// You will definitely want to look up the methods of the IDirectSoundBuffer8 interface to see what these
// methods do and what the parameters are used for.
result = secondaryBuffer->Lock(0, dataBufferSize, (void**)&bufferPtr1, (DWORD*)&bufferSize1, (void**)&bufferPtr2, (DWORD*)&bufferSize2, 0);
if (FAILED(result))
{
return false;
}
// Copy the wave data into the buffer. If you need to insert some silence into the buffer, insert values of 0.
memcpy(bufferPtr1, dataBuffer, bufferSize1);
if (bufferPtr2 != NULL)
{
memcpy(bufferPtr2, dataBuffer, bufferSize2);
}
// Unlock the secondary buffer after the data has been written to it.
result = secondaryBuffer->Unlock((void*)bufferPtr1, bufferSize1, (void*)bufferPtr2, bufferSize2);
if (FAILED(result))
{
return false;
}
// Play the contents of the secondary sound buffer. If you want play to go back to the start of the buffer
// again, set the last parameter to DSBPLAY_LOOPING instead of 0. If play is already in progress, then
// play will just continue.
result = secondaryBuffer->Play(0, 0, 0);
if (FAILED(result))
{
return false;
}
// Wait for notifications. In this case, we only have one notification so we could use WaitForSingleObject,
// but for the assignment you will need more than one notification, so you will need WaitForMultipleObjects
result = WaitForMultipleObjects(1, playEventHandles, FALSE, INFINITE);
// In this case, we have been notified that playback has finished so we can just finish. In the assignment,
// you should use the appropriate notification to determine which part of the secondary buffer needs to be
// filled and handle it accordingly.
CloseHandle(playEventHandles[0]);
return true;
}
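To answer the question about ordering first: Initialise() creates the IDirectSound8 device and the primary buffer, LoadWaveFile() parses the WAV file and fills the secondary buffer, PlayWaveFile() plays it and waits for the stop notification, and ReleaseSecondaryBuffer()/Shutdown() tear everything down. A minimal driver under those assumptions (the file name is only a placeholder and error handling is reduced to early exits) would look like this:
int main()
{
    TCHAR filename[] = TEXT("test.wav");   // placeholder path

    if (!Initialise())                     // device, cooperative level, primary buffer
        return 1;

    if (LoadWaveFile(filename))            // parse the WAV and create the secondary buffer
    {
        PlayWaveFile();                    // fill the secondary buffer, play, wait for the stop event
        ReleaseSecondaryBuffer();
    }

    Shutdown();                            // free the data buffer and release the DirectSound objects
    return 0;
}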
DirectSound is deprecated. See below for recommended replacements.
Documentation can be found on Microsoft Docs. The last time samples for DirectSound were shipped was in the legacy DirectX SDK (November 2007) release, which is why you are having a hard time finding them. You can find them on GitHub. The headers and link libraries for DirectSound are in the Windows SDK.
Recommendations
For 'real-time mixing and effects' often used in games, the modern replacement is XAudio2. XAudio 2.9 is included in Windows 10, and is available through a simple side-by-side redistribution model for Windows 7, Windows 8.0, and Windows 8.1. Documentation can be found here, samples can be found here, and the redist can be found here. You may also want to take a look at DirectX Tool Kit for Audio.
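If you want a feel for the replacement, basic XAudio2 start-up is only a few calls; a minimal sketch (engine plus mastering voice, not tied to the assignment code) looks like this:
#include <xaudio2.h>
#include <wrl/client.h>
#pragma comment(lib, "xaudio2.lib")

// Sketch: create the XAudio2 engine and a mastering voice for the default device.
// Source voices (the rough analogue of DirectSound secondary buffers) would be created next.
HRESULT StartXAudio2()
{
    HRESULT hr = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
    if (FAILED(hr))
        return hr;

    Microsoft::WRL::ComPtr<IXAudio2> xaudio2;
    hr = XAudio2Create(xaudio2.GetAddressOf(), 0, XAUDIO2_DEFAULT_PROCESSOR);
    if (FAILED(hr))
        return hr;

    IXAudio2MasteringVoice* masteringVoice = nullptr;
    hr = xaudio2->CreateMasteringVoice(&masteringVoice);
    return hr;
}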
For other audio output and input, see the Windows Core Audio APIs (WASAPI), which are supported on Windows Vista, Windows 7, Windows 8.0, Windows 8.1, and Windows 10. Documentation can be found here. Some samples can be found on GitHub in Xbox-ATG-Samples and Windows-universal-samples; while these are all UWP samples, the API also supports Win32 desktop.
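For comparison, opening a shared-mode WASAPI render stream follows the usual Core Audio sequence (enumerator, endpoint, IAudioClient, IAudioRenderClient). A sketch only; interface cleanup and format negotiation are trimmed:
#include <mmdeviceapi.h>
#include <audioclient.h>

// Sketch: open the default render endpoint in shared mode.
// Cleanup of enumerator/device/mixFormat and most error details are omitted for brevity.
HRESULT OpenDefaultRenderClient(IAudioClient** ppClient, IAudioRenderClient** ppRender)
{
    IMMDeviceEnumerator* enumerator = nullptr;
    IMMDevice* device = nullptr;
    WAVEFORMATEX* mixFormat = nullptr;

    HRESULT hr = CoCreateInstance(__uuidof(MMDeviceEnumerator), nullptr, CLSCTX_ALL,
                                  __uuidof(IMMDeviceEnumerator), (void**)&enumerator);
    if (FAILED(hr)) return hr;

    hr = enumerator->GetDefaultAudioEndpoint(eRender, eConsole, &device);
    if (FAILED(hr)) return hr;

    hr = device->Activate(__uuidof(IAudioClient), CLSCTX_ALL, nullptr, (void**)ppClient);
    if (FAILED(hr)) return hr;

    hr = (*ppClient)->GetMixFormat(&mixFormat);
    if (FAILED(hr)) return hr;

    // 1-second shared-mode buffer (duration is in 100-ns units)
    hr = (*ppClient)->Initialize(AUDCLNT_SHAREMODE_SHARED, 0, 10000000, 0, mixFormat, nullptr);
    if (FAILED(hr)) return hr;

    // The render client provides GetBuffer/ReleaseBuffer for writing audio data
    return (*ppClient)->GetService(__uuidof(IAudioRenderClient), (void**)ppRender);
}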
There's also a new Microsoft Spatial Sounds API on Windows 10 (a.k.a. Windows Sonic). Documentation can be found here. Samples can be found on GitHub in Xbox-ATG-Samples.

How to convert IStream to Base64 string in c++

I have an IStream and I want to convert it to a Base64 string in C++ so that I can use it in C++/CLI to convert it to a .NET stream.
Similarly, I want to know how to convert a Base64 string back to an IStream.
My code uses ATL CString...
template<class T>
HRESULT StreamToBase64(IStream* pStream, T &strOut)
{
    // Clear the output buffer
    strOut.ReleaseBuffer(0);
    if (!pStream)
        return S_OK;

    // Get the stream size
    STATSTG stat;
    pStream->Stat(&stat, STATFLAG_NONAME);
    unsigned iLen = stat.cbSize.LowPart;

    // Seek to the start
    LARGE_INTEGER lPos;
    lPos.QuadPart = 0;
    HRESULT hr = pStream->Seek(lPos, STREAM_SEEK_SET, nullptr);
    if (FAILED(hr))
        return hr;

    // Reserve the memory
    strOut.GetBuffer(Mfx::BinToBase64GetRequiredLength(iLen));
    strOut.ReleaseBuffer(0);

    // Encode one group of Base64 lines at a time. One line of Base64 encodes 57 bytes
    // (76 output characters), so we use a buffer of just under 1 KB.
    const int iBase64LineLen = 76/4*3;
    BYTE bLine[(1024/iBase64LineLen)*iBase64LineLen];
    ULONG bRead;
    while (SUCCEEDED(hr = pStream->Read(bLine, sizeof(bLine), &bRead)) && bRead != 0)
        strOut += T(Mfx::BinToBase64A(bLine, bRead));

    // And seek back to the start
    pStream->Seek(lPos, STREAM_SEEK_SET, nullptr);
    return SUCCEEDED(hr) ? S_OK : hr;
}

HRESULT StreamToBase64(IStream* pStream, CStringA &strOut)
{
    return StreamToBase64<CStringA>(pStream, strOut);
}

HRESULT StreamToBase64(IStream* pStream, CStringW &strOut)
{
    return StreamToBase64<CStringW>(pStream, strOut);
}
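The reverse direction you asked about (Base64 string back to an IStream) is not shown above. Here is a sketch using ATL's Base64Decode from <atlenc.h> together with SHCreateMemStream; this is my own illustration rather than one of the Mfx helpers used above:
#include <atlenc.h>
#include <shlwapi.h>
#pragma comment(lib, "shlwapi.lib")

// Sketch: decode a Base64 CStringA into a new memory IStream.
// The caller owns the returned stream and must Release() it.
HRESULT Base64ToStream(const CStringA& strIn, IStream** ppStream)
{
    if (ppStream == nullptr)
        return E_POINTER;
    *ppStream = nullptr;

    int nDecodedLen = Base64DecodeGetRequiredLength(strIn.GetLength());
    CHeapPtr<BYTE> buffer;
    if (!buffer.Allocate(nDecodedLen))
        return E_OUTOFMEMORY;

    if (!Base64Decode(strIn, strIn.GetLength(), buffer, &nDecodedLen))
        return E_FAIL;

    // SHCreateMemStream copies the bytes, so the temporary buffer may be freed afterwards
    *ppStream = SHCreateMemStream(buffer, nDecodedLen);
    return *ppStream ? S_OK : E_OUTOFMEMORY;
}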

Reading Clipboard with WIC

I'm trying to write a function to read images copied to the clipboard with the Windows Imaging Component. Currently my code is based on how I load resources, but modified to work with the clipboard. Everything up to initializing the stream works fine, but once I try to use CreateDecoderFromStream it fails. I've tried copying images from numerous places to no avail. Is there something different about the copied image format that can't be read by WIC?
Here's my code below...
Creating a memory object to read from the clipboard
COMStreamSPtr WIC::createStreamFromClipboard() {
IStream* ipStream = NULL;
COMStreamSPtr stream = nullptr;
CoInitialize(nullptr);
if (!IsClipboardFormatAvailable(CF_BITMAP) && !IsClipboardFormatAvailable(CF_DIB) && !IsClipboardFormatAvailable(CF_DIBV5))
goto Return;
if (!OpenClipboard(NULL))
goto Return;
// Load the clipboard
HGLOBAL hMem = GetClipboardData(CF_BITMAP);
if (hMem == NULL || hMem == INVALID_HANDLE_VALUE)
hMem = GetClipboardData(CF_DIB);
if (hMem == NULL || hMem == INVALID_HANDLE_VALUE)
hMem = GetClipboardData(CF_DIBV5);
if (hMem == NULL || hMem == INVALID_HANDLE_VALUE)
goto CloseClipboard;
// Lock the clipboard, getting a pointer to its data
LPVOID pvSourceClipboardData = GlobalLock(hMem);
if (pvSourceClipboardData == NULL)
goto CloseClipboard;
// Read the clipboard data size
DWORD dwClipboardSize = GlobalSize(hMem);
if (dwClipboardSize == 0)
goto GlobalUnlock;
// Allocate memory to hold the clipboard data
HGLOBAL hgblClipboardData = GlobalAlloc(GMEM_MOVEABLE, dwClipboardSize);
if (hgblClipboardData == NULL)
goto GlobalUnlock;
// Get a pointer to the allocated memory
LPVOID pvClipboardData = GlobalLock(hgblClipboardData);
if (pvClipboardData == NULL)
goto FreeData;
// Copy the data from the clipboard to the new memory block
CopyMemory(pvClipboardData, pvSourceClipboardData, dwClipboardSize);
GlobalUnlock(hgblClipboardData);
// Create a stream on the HGLOBAL containing the data
if (SUCCEEDED(CreateStreamOnHGlobal(hgblClipboardData, TRUE, &ipStream))) {
stream = std::make_shared<COMStream>(ipStream);
goto CloseClipboard;
}
FreeData:
// Couldn't create stream; free the memory
GlobalFree(hgblClipboardData);
GlobalUnlock:
// Unlock the clipboard data
GlobalUnlock(hMem);
CloseClipboard:
// Close the clipboard
CloseClipboard();
Return:
// No need to unlock or free the resource
CoUninitialize(); // CoInitialize is called by my COMObjects in the constructor, so this is just decrementing the count.
return stream;
}
Elsewhere in the code, using the returned stream:
WICBitmapDecoderSPtr WIC::createDecoderFromStream(COMStreamSPtr stream) {
if (isCOMObjectNull(stream))
return nullptr;
IWICImagingFactory* ipFactory = NULL;
IWICBitmapDecoder* ipDecoder = NULL;
WICBitmapDecoderSPtr decoder = nullptr;
// Create the factory
if (FAILED(CoCreateInstance(CLSID_WICImagingFactory, NULL, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&ipFactory))))
goto Return;
// Create the decoder
if (FAILED(ipFactory->CreateDecoderFromStream(stream->ipObject, NULL, WICDecodeMetadataCacheOnDemand, &ipDecoder)))
goto ReleaseFactory; // FAILS HERE
decoder = std::make_shared<WICBitmapDecoder>(ipDecoder);
ReleaseFactory:
ipFactory->Release();
Return:
return decoder;
}
I've finally figured out how to do this. What I did is use IWICImagingFactory's CreateBitmapFromHBITMAP function. Since reading an HBITMAP from the clipboard is trivial, the solution becomes pretty straightforward.
IWICBitmapSource* WIC::readSourceFromClipboard() {
    IWICBitmapSource* ipSource = NULL;
    if (Clipboard::containsFormat(CF_BITMAP)) {
        if (OpenClipboard(NULL)) {
            HBITMAP hBitmap = (HBITMAP)GetClipboardData(CF_BITMAP);
            ipSource = createSourceFromHBitmap(hBitmap);
            CloseClipboard();
        }
    }
    return ipSource;
}
IWICBitmapSource* WIC::createSourceFromHBitmap(HBITMAP hBitmap) {
    if (hBitmap == NULL)
        return NULL;
    IWICImagingFactory* ipFactory = NULL;
    IWICBitmap* ipBitmap = NULL;
    CoInitialize(NULL);
    // Create the factory
    if (FAILED(CoCreateInstance(CLSID_WICImagingFactory, NULL, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&ipFactory))))
        goto Return;
    // Create the bitmap
    if (FAILED(ipFactory->CreateBitmapFromHBITMAP(hBitmap, NULL, WICBitmapIgnoreAlpha, &ipBitmap)))
        goto ReleaseFactory;
ReleaseFactory:
    ipFactory->Release();
Return:
    CoUninitialize();
    return ipBitmap;
}
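A quick usage sketch for the returned source (assuming an instance of the WIC class above named wic, and using the stock WICConvertBitmapSource helper to normalize the pixel format):
// Sketch: take the clipboard bitmap source and convert it to 32bpp BGRA.
IWICBitmapSource* pSource = wic.readSourceFromClipboard();
if (pSource != NULL) {
    UINT width = 0, height = 0;
    pSource->GetSize(&width, &height);

    IWICBitmapSource* pConverted = NULL;
    if (SUCCEEDED(WICConvertBitmapSource(GUID_WICPixelFormat32bppBGRA, pSource, &pConverted))) {
        // pConverted can now be fed to CopyPixels(), a WIC encoder, or Direct2D
        pConverted->Release();
    }
    pSource->Release();
}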

Video captured by Media Foundation is vertically mirrored

I'm using a Media Foundation IMFSourceReaderCallback implementation to grab video frames from the camera, and then OpenCV's imshow to present the frames in a loop.
However, I get the frames vertically flipped...
Is this a bug? Should I set some attribute to avoid this?
Here is my code:
Initialization:
IMFAttributes* pDeviceAttrs, *pReaderAttrs;
hr = MFCreateAttributes(&pDeviceAttrs, 1);
if (FAILED(hr)) goto Exit;
hr = pDeviceAttrs->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
if (FAILED(hr)) goto Exit;
//...
// Correct source provider is activated through ActivateObject
//
hr = MFCreateAttributes(&pReaderAttrs, 2);
if (FAILED(hr)) goto Exit;
pReaderAttrs->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK,(IUnknown*)this);
pReaderAttrs->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, TRUE);
hr = MFCreateSourceReaderFromMediaSource(pMediaSource, pReaderAttrs, &m_pReader);
if (FAILED(hr)) goto Exit;
// Correct profile is set
OnReadSample implementation:
HRESULT hr = S_OK;
LONG defaultStride = 0;
LONG stride = 0;
BYTE *pBuffer = NULL;
EnterCriticalSection(&m_critSec);
if (NULL != pSample)
{
IMFMediaBuffer* pMediaBuffer;
DWORD dataSize = 0;
// In case of a single buffer, no copy would happen
hr = pSample->ConvertToContiguousBuffer(&pMediaBuffer);
if (FAILED(hr)) goto Cleanup;
pMediaBuffer->GetCurrentLength(&dataSize);
hr = pMediaBuffer->Lock(&pBuffer, &dataSize, &dataSize);
if (FAILED(hr)) goto Cleanup;
// todo: use a backbuffer to avoid sync issues
if (NULL == m_pLatestFrame) m_pLatestFrame = (BYTE*)malloc(dataSize);
memcpy(m_pLatestFrame, pBuffer, dataSize);
++m_frameNumber;
pMediaBuffer->Unlock();
pMediaBuffer->Release();
}
Cleanup:
LeaveCriticalSection(&m_critSec);
// Async ReadFrame for the next buffer:
hr = m_pReader->ReadSample(
(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
0,
NULL, // actual
NULL, // flags
NULL, // timestamp
NULL // sample
);
return hr;
Conversion to cv::image:
void SourceReaderImpl::GetLatestFrame(BYTE** ppLatestFrame)
{
EnterCriticalSection(&m_critSec);
*ppLatestFrame = m_pLatestFrame;
LeaveCriticalSection(&m_critSec);
}
void* CameraWrapperImpl::getLatestFrame()
{
BYTE* pLatestFrame = NULL;
m_pMfReader->GetLatestFrame(&pLatestFrame);
return pLatestFrame;
}
void Player::Present()
{
//...
color = cv::Mat(colorSize,
CV_8UC3,
static_cast<unsigned char*>(m_pColorCameraImpl->getLatestFrame()));
cv::imshow(color);
}
Any idea?
Thanks in advance!
A Windows bitmap is normally stored bottom-up, with the last scan line first, so the image appears upside down. The easiest solution is to call cv::flip:
void Player::Present()
{
    //...
    color = cv::Mat(colorSize,
                    CV_8UC3,
                    static_cast<unsigned char*>(m_pColorCameraImpl->getLatestFrame()));
    cv::Mat corrected;
    cv::flip(color, corrected, 0);      // flip around the x-axis (vertically)
    cv::imshow("color", corrected);     // imshow needs a window name; "color" is just an example
}
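If you would rather correct the orientation while copying the frame out of the media buffer (instead of flipping afterwards with OpenCV), the usual trick is to copy with a negative source stride via MFCopyImage. A sketch, assuming a packed RGB24 frame whose width and height you already know:
#include <mfapi.h>
#pragma comment(lib, "mfplat.lib")

// Sketch: copy a bottom-up RGB24 frame into a top-down destination buffer by
// starting at the last scan line of the source and walking backwards.
HRESULT CopyFrameTopDown(BYTE* pDest, const BYTE* pSrc, UINT width, UINT height)
{
    const LONG stride = static_cast<LONG>(width) * 3;   // bytes per packed RGB24 row
    const BYTE* pLastRow = pSrc + static_cast<SIZE_T>(stride) * (height - 1);

    return MFCopyImage(pDest, stride,       // top-down destination
                       pLastRow, -stride,   // bottom-up source
                       stride, height);
}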

Unable to read xml string with xmllite using memory buffer

I am trying to open an in-memory stream for use with the xmllite library. Writing to one works OK, but reading from one is giving me a hard time. Below is the code that I am using. Basically, I create a default XML string (LPWSTR) and write it to a memory stream using CreateStreamOnHGlobal. I then seek to the beginning and read it back to make sure it's in there (it is). Then I seek back again and assign it to the input of the reader. It never gets past the line:
while (S_OK == (hr = pReader->Read(&nodeType)))
I get an XmlNodeType_None and an HRESULT value of -1072894427. I believe it is having trouble reading the stream, but I don't know for sure. The same code works fine if I use a file stream instead, and writing the XML out from the memory stream works as well.
HRESULT hr = S_OK;
CComPtr<IStream> pStream = NULL;
IXmlReader *pReader = NULL;
XmlNodeType nodeType;
LPWSTR pwszXMLString =
L"<?xml version\"1.0\" encoding=\"UTF-8\" ?>\r\n"
L"<paramlist name=\"LP\">\r\n"
L"<value></value>\r\n"
L"<value></value>\r\n"
L"</paramlist>\r\n"
L"<param name=\"AutoConnect\">false</param>\r\n"
L"<param name=\"ConnectWhenLit\">false</param>\r\n"
L"<param name=\"SessionMaxBytes\">200000</param>\r\n"
L"<param name=\"SessionTimeoutSecs\">300</param>\r\n"
L"<param name=\"PacketDelayMs\">0</param>\r\n"
L"<param name=\"PacketSizeBytes\">4096</param>\r\n"
L"<param name=\"LowSSLSecurity\">true</param>\r\n";
DWORD dwWritten = 0;
hr = CreateStreamOnHGlobal(NULL, FALSE, &pStream);
hr = pStream->Write(pwszXMLString, wcslen(pwszXMLString) * sizeof(WCHAR), &dwWritten);
// print out the contents of the memory stream just to make sure we have it
LARGE_INTEGER pos;
pos.QuadPart = 0;
pStream->Seek(pos, STREAM_SEEK_SET, NULL);
STATSTG ssStreamData = {0};
pStream->Stat(&ssStreamData, STATFLAG_NONAME);
SIZE_T cbSize = ssStreamData.cbSize.LowPart;
LPWSTR pwszContent = (WCHAR*) new BYTE[cbSize + sizeof(WCHAR)];
if (pwszContent == NULL)
return E_OUTOFMEMORY;
pStream->Seek(pos, STREAM_SEEK_SET, NULL);
SIZE_T cbRead;
pStream->Read(pwszContent, cbSize, &cbRead);
pwszContent[cbSize/sizeof(WCHAR)] = L'\0';
CZString czContent;
czContent.LoadWideString(pwszContent, cbSize);
wprintf(L"%S", czContent.GetString().c_str());
pStream->Seek(pos, STREAM_SEEK_SET, NULL);
if (hr == S_OK)
{
typedef HRESULT (WINAPI *CreateXmlReaderFunc)(const IID & riid, void** ppvObject, IMalloc * pMalloc);
CreateXmlReaderFunc _CreateXmlReaderFunc = (CreateXmlReaderFunc)GetProcAddress(m_hXMLLite, "CreateXmlReader");
if (FAILED(hr = _CreateXmlReaderFunc(__uuidof(IXmlReader), (void**) &pReader, NULL)))
{
MessageBox(NULL, CStringHelper::Format(L"Error: GetProcAddress() failed to find 'CreateXmlReader' %d\n", GetLastError()).c_str(), L"Error", MB_OK);
return -1;
}
pReader->SetInput(pStream);
}
while (S_OK == (hr = pReader->Read(&nodeType)))
{
switch (nodeType)
{
// parse xml here
}
}
return 0;
Your XML string is not correct. There must be an "=" between "version" and "1.0". Secondly, the string is UTF-16, while the declaration states it is UTF-8. Change UTF-8 to UTF-16 in the declaration, or remove the encoding attribute.
Either
LPWSTR pwszXMLString =
L"<?xml version=\"1.0\" encoding=\"UTF-16\" ?>\r\n"
or
LPWSTR pwszXMLString =
L"<?xml version=\"1.0\"?>\r\n"
works.
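Once the declaration is fixed, the read loop typically switches on the node type and pulls names and values from the reader. A minimal sketch of that part (the handling is illustrative, not specific to your schema):
XmlNodeType nodeType;
while (S_OK == (hr = pReader->Read(&nodeType)))
{
    LPCWSTR pwszValue = NULL;
    switch (nodeType)
    {
    case XmlNodeType_Element:
        pReader->GetLocalName(&pwszValue, NULL);
        wprintf(L"Element: %s\n", pwszValue);
        break;
    case XmlNodeType_Text:
        pReader->GetValue(&pwszValue, NULL);
        wprintf(L"Text: %s\n", pwszValue);
        break;
    default:
        break;
    }
}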