Get Pixel Buffer from ID3D11Texture2D (C++)

I would like to extract pixels from an ID3D11Texture2D.
I have this function:
DUPL_RETURN DISPLAYMANAGER::CopyDirty(_In_ ID3D11Texture2D* SrcSurface, _Inout_ ID3D11Texture2D* SharedSurf, _In_reads_(DirtyCount) RECT* DirtyBuffer, UINT DirtyCount, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc)
from the Desktop Duplication sample: https://code.msdn.microsoft.com/windowsdesktop/Desktop-Duplication-Sample-da4c696a/sourcecode?fileId=42782&pathId=1384140008
Here is my code; the returned buffer is full of '\0':
BYTE* DISPLAYMANAGER::GetImageData(ID3D11Device* device, ID3D11DeviceContext* context, ID3D11Texture2D* texture2D, D3D11_TEXTURE2D_DESC Desc)
{
    if (texture2D != NULL)
    {
        D3D11_TEXTURE2D_DESC description;
        texture2D->GetDesc(&description);
        description.BindFlags = 0;
        description.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
        description.Usage = D3D11_USAGE_STAGING;
        ID3D11Texture2D* texTemp = NULL;
        HRESULT hr = device->CreateTexture2D(&description, NULL, &texTemp);
        if (FAILED(hr))
        {
            if (texTemp)
            {
                texTemp->Release();
                texTemp = NULL;
            }
            return NULL;
        }
        context->CopyResource(texTemp, texture2D);
        D3D11_MAPPED_SUBRESOURCE mapped;
        unsigned int subresource = 0;
        hr = context->Map(texTemp, 0, D3D11_MAP_READ, 0, &mapped);
        if (FAILED(hr))
        {
            texTemp->Release();
            texTemp = NULL;
            return NULL;
        }
        Desc.Width = description.Width;
        Desc.Height = description.Height;
        const int pitch = mapped.RowPitch;
        BYTE* source = (BYTE*)(mapped.pData);
        BYTE* dest = new BYTE[(Desc.Width)*(Desc.Height) * 4];
        BYTE* destTemp = dest;
        for (int i = 0; i < Desc.Height; ++i)
        {
            memcpy(destTemp, source, Desc.Width * 4);
            source += pitch;
            destTemp += Desc.Width * 4;
        }
        context->Unmap(texTemp, 0);
        return dest;
    }
    else
        return NULL;
}
I call this function inside CopyDirty(_In_ ID3D11Texture2D* SrcSurface, _Inout_ ID3D11Texture2D* SharedSurf, _In_reads_(DirtyCount) RECT* DirtyBuffer,
UINT DirtyCount, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc, D3D11_TEXTURE2D_DESC Desc)
like this:
...
m_DeviceContext->Draw(NUMVERTICES * DirtyCount, 0);
BYTE *Bytes = GetImageData(m_Device, m_DeviceContext, SrcSurface, Desc);
Thank you all

Related

E_INVALIDARG in Map (D3D11)

Good afternoon.
I am trying to write a program that reads the desktop pixels and displays them, but I get an E_INVALIDARG error from the Map function.
My function is below, with a comment marking where the error occurs. I also passed the D3D11_CREATE_DEVICE_DEBUG flag to D3D11CreateDevice, but it doesn't output anything. Please help.
BYTE* GetImageData()
{
    if (pDupTex2D != NULL)
    {
        printf("Tap1\n");
        /*
        D3D11_TEXTURE2D_DESC description;
        pDupTex2D->GetDesc(&description);
        description.BindFlags = 0;
        description.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
        description.Usage = D3D11_USAGE_STAGING;
        description.Format = DXGI_FORMAT_B8G8R8A8_UNORM;*/
        D3D11_TEXTURE2D_DESC Desc;
        //Desc.Width = 3440;
        //Desc.Height = 1440;
        Desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
        Desc.Usage = D3D11_USAGE_STAGING;
        Desc.BindFlags = 0;
        Desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
        pDupTex2D->GetDesc(&Desc);
        ID3D11Texture2D *texTemp = NULL;
        HRESULT hr = pD3DDev->CreateTexture2D(&Desc, NULL, &texTemp);
        std::cout << std::hex << hr << std::endl;
        if (FAILED(hr))
        {
            if (texTemp)
            {
                texTemp->Release();
                texTemp = NULL;
            }
            return NULL;
        }
        printf("Tap2\n");
        pCtx->CopyResource(texTemp, pDupTex2D);
        D3D11_MAPPED_SUBRESOURCE mapped;
        unsigned int subresource = D3D11CalcSubresource(0, 0, 0);
        hr = pCtx->Map(texTemp, subresource, D3D11_MAP_READ_WRITE, 0, &mapped); // ERROR ===================================
        std::cout << std::hex << hr << std::endl;
        if (FAILED(hr))
        {
            texTemp->Release();
            texTemp = NULL;
            return NULL;
        }
        printf("Tap3\n");
        unsigned char* captureData = new unsigned char[Desc.Width * Desc.Height * 4];
        RtlZeroMemory(captureData, Desc.Width * Desc.Height * 4);
        const int pitch = mapped.RowPitch;
        unsigned char* source = static_cast<unsigned char*>(mapped.pData);
        unsigned char* dest = captureData;
        for (int i = 0; i < Desc.Height; i++) {
            memcpy(captureData, source, Desc.Width * 4);
            source += pitch;
            captureData += Desc.Width * 4;
        }
        for (int i = 0; i < Desc.Width * Desc.Height * 4; i++) {
            printf("h");
            std::cout << "Pixel[%d] = %x\n" << i << dest[i] << std::endl;
        }
        pCtx->Unmap(texTemp, 0);
        return dest;
    }
    else {
        return NULL;
    }
}

How to take a screenshot on Windows with minimal file size in C++

I wrote a program that takes a screenshot, but the problem is that the image is larger than 6 MB.
I want to change it to minimize the image size.
This is my function:
BOOL CALLBACK MonitorEnumProcCallback(HMONITOR hMonitor, HDC DevC, LPRECT lprcMonitor, LPARAM dwData)
{
    const char* BmpName;
    string BmpNameString;
    BmpNameString = "screen.jpeg";
    BmpName = BmpNameString.c_str();
    MONITORINFO info;
    info.cbSize = sizeof(MONITORINFO);
    BOOL monitorInfo = GetMonitorInfo(hMonitor, &info);
    if (monitorInfo) {
        DWORD Width = info.rcMonitor.right - info.rcMonitor.left;
        DWORD Height = info.rcMonitor.bottom - info.rcMonitor.top;
        DWORD FileSize = sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER) + 3 * Width * Height;
        char *BmpFileData = (char*)GlobalAlloc(0x0040, FileSize);
        PBITMAPFILEHEADER BFileHeader = (PBITMAPFILEHEADER)BmpFileData;
        PBITMAPINFOHEADER BInfoHeader = (PBITMAPINFOHEADER)&BmpFileData[sizeof(BITMAPFILEHEADER)];
        BFileHeader->bfType = 0x4D42; // BM
        BFileHeader->bfSize = sizeof(BITMAPFILEHEADER);
        BFileHeader->bfOffBits = sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER);
        BInfoHeader->biSize = sizeof(BITMAPINFOHEADER);
        BInfoHeader->biPlanes = 1;
        BInfoHeader->biBitCount = 24;
        BInfoHeader->biCompression = BI_RGB;
        BInfoHeader->biHeight = Height;
        BInfoHeader->biWidth = Width;
        RGBTRIPLE *Image = (RGBTRIPLE*)&BmpFileData[sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER)];
        RGBTRIPLE color;
        HDC CaptureDC = CreateCompatibleDC(DevC);
        HBITMAP CaptureBitmap = CreateCompatibleBitmap(DevC, Width, Height);
        SelectObject(CaptureDC, CaptureBitmap);
        BitBlt(CaptureDC, 0, 0, Width, Height, DevC, info.rcMonitor.left, info.rcMonitor.top, SRCCOPY | CAPTUREBLT);
        GetDIBits(CaptureDC, CaptureBitmap, 0, Height, Image, (LPBITMAPINFO)BInfoHeader, DIB_RGB_COLORS);
        DWORD Junk;
        HANDLE FH = CreateFileA(BmpName, GENERIC_WRITE, FILE_SHARE_WRITE, 0, CREATE_ALWAYS, 0, 0);
        WriteFile(FH, BmpFileData, FileSize, &Junk, 0);
        CloseHandle(FH);
        GlobalFree(BmpFileData);
    }
    return TRUE;
}
If you don't mind, you can use the GDI+ method below to get a relatively small file:
#include <windows.h>
#include <iostream>
#include <string.h>
#include <gdiplus.h>
#include <stdio.h>

using namespace Gdiplus;
using namespace std;
#pragma comment(lib, "Gdiplus.lib")

int GetEncoderClsid(const WCHAR* format, CLSID* pClsid)
{
    UINT num = 0;  // number of image encoders
    UINT size = 0; // size of the image encoder array in bytes
    ImageCodecInfo* pImageCodecInfo = NULL;
    GetImageEncodersSize(&num, &size);
    if (size == 0)
        return -1; // Failure
    pImageCodecInfo = (ImageCodecInfo*)(malloc(size));
    if (pImageCodecInfo == NULL)
        return -1; // Failure
    GetImageEncoders(num, size, pImageCodecInfo);
    for (UINT j = 0; j < num; ++j)
    {
        if (wcscmp(pImageCodecInfo[j].MimeType, format) == 0)
        {
            *pClsid = pImageCodecInfo[j].Clsid;
            free(pImageCodecInfo);
            return j; // Success
        }
    }
    free(pImageCodecInfo);
    return -1; // Failure
}

BOOL CALLBACK MonitorEnumProcCallback(HMONITOR hMonitor, HDC DevC, LPRECT lprcMonitor, LPARAM dwData)
{
    wstring BmpNameString = L"screen.jpeg";
    MONITORINFO info;
    info.cbSize = sizeof(MONITORINFO);
    BOOL monitorInfo = GetMonitorInfo(hMonitor, &info);
    if (monitorInfo) {
        DWORD Width = info.rcMonitor.right - info.rcMonitor.left;
        DWORD Height = info.rcMonitor.bottom - info.rcMonitor.top;
        //DWORD FileSize = sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER) + 3 * Width * Height;
        //char* BmpFileData = (char*)GlobalAlloc(0x0040, FileSize);
        //PBITMAPFILEHEADER BFileHeader = (PBITMAPFILEHEADER)BmpFileData;
        //PBITMAPINFOHEADER BInfoHeader = (PBITMAPINFOHEADER)&BmpFileData[sizeof(BITMAPFILEHEADER)];
        //BFileHeader->bfType = 0x4D42; // BM
        //BFileHeader->bfSize = sizeof(BITMAPFILEHEADER);
        //BFileHeader->bfOffBits = sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER);
        //BInfoHeader->biSize = sizeof(BITMAPINFOHEADER);
        //BInfoHeader->biPlanes = 1;
        //BInfoHeader->biBitCount = 24;
        //BInfoHeader->biCompression = BI_RLE8;
        //BInfoHeader->biHeight = Height;
        //BInfoHeader->biWidth = Width;
        //RGBTRIPLE* Image = (RGBTRIPLE*)&BmpFileData[sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER)];
        //RGBTRIPLE color;
        HDC CaptureDC = CreateCompatibleDC(DevC);
        HBITMAP CaptureBitmap = CreateCompatibleBitmap(DevC, Width, Height);
        HGDIOBJ old_obj = SelectObject(CaptureDC, CaptureBitmap);
        BitBlt(CaptureDC, 0, 0, Width, Height, DevC, info.rcMonitor.left, info.rcMonitor.top, SRCCOPY | CAPTUREBLT);
        //GetDIBits(CaptureDC, CaptureBitmap, 0, Height, Image, (LPBITMAPINFO)BInfoHeader, DIB_RGB_COLORS);
        /*DWORD Junk;
        HANDLE FH = CreateFileA(BmpName, GENERIC_WRITE, FILE_SHARE_WRITE, 0, CREATE_ALWAYS, 0, 0);
        WriteFile(FH, BmpFileData, FileSize, &Junk, 0);
        CloseHandle(FH);
        GlobalFree(BmpFileData);*/
        Gdiplus::Bitmap bitmap(CaptureBitmap, NULL);
        CLSID pngClsid;
        GetEncoderClsid(L"image/jpeg", &pngClsid);
        bitmap.Save(BmpNameString.c_str(), &pngClsid, NULL);
        SelectObject(CaptureDC, old_obj);
        DeleteDC(CaptureDC);
        ReleaseDC(NULL, DevC);
        DeleteObject(CaptureBitmap);
    }
    return TRUE;
}

int main()
{
    GdiplusStartupInput gdiplusStartupInput;
    ULONG_PTR gdiplusToken;
    GdiplusStartup(&gdiplusToken, &gdiplusStartupInput, NULL);
    HDC hdc = GetDC(NULL);
    EnumDisplayMonitors(hdc, NULL, MonitorEnumProcCallback, NULL);
    ReleaseDC(NULL, hdc);
    GdiplusShutdown(gdiplusToken);
    return 0;
}
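If the JPEG is still larger than you would like, GDI+ also lets you pass an encoder quality parameter to Bitmap::Save, trading image quality for file size. A small sketch using the same variables as above (the quality value 60 is only an example; note that pngClsid, despite its name, holds the JPEG encoder CLSID here):

ULONG quality = 60; // 0..100; lower values give smaller files
EncoderParameters encoderParams;
encoderParams.Count = 1;
encoderParams.Parameter[0].Guid = EncoderQuality; // JPEG quality setting
encoderParams.Parameter[0].Type = EncoderParameterValueTypeLong;
encoderParams.Parameter[0].NumberOfValues = 1;
encoderParams.Parameter[0].Value = &quality;
bitmap.Save(BmpNameString.c_str(), &pngClsid, &encoderParams);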

DirectX11 C++ Shader Buffer becomes null on polygon layout description

I am developing a game engine using DX11. My problem is that I am getting a read access violation because vertexShaderBuffer was nullptr.
bool TerrainShaderClass::InitializeShader(ID3D11Device* device, HWND hwnd, LPCSTR vsFileName, LPCSTR psFileName)
{
    HRESULT result;
    ID3D10Blob* errorMessage = nullptr;
    ID3D10Blob* vertexShaderBuffer = nullptr;
    ID3D10Blob* pixelShaderBuffer = nullptr;
    D3D11_INPUT_ELEMENT_DESC polygonLayout[3];
    unsigned int numElements;
    D3D11_SAMPLER_DESC samplerDesc;
    D3D11_BUFFER_DESC matrixBufferDesc;
    D3D11_BUFFER_DESC lightBufferDesc;

    result = D3DX11CompileFromFile(vsFileName, NULL, NULL, "TerrainVertexShader", "vs_5_0", D3D10_SHADER_ENABLE_STRICTNESS, 0, NULL,
        &vertexShaderBuffer, &errorMessage, NULL);
    if (FAILED(result))
    {
        if (errorMessage)
        {
            OutputShaderErrorMessage(errorMessage, hwnd, vsFileName);
        }
        else
        {
            MessageBox(hwnd, vsFileName, "Missing Shader File", MB_OK);
        }
        return false;
    }
    result = D3DX11CompileFromFile(psFileName, NULL, NULL, "TerrainPixelShader", "ps_5_0", D3D10_SHADER_ENABLE_STRICTNESS, 0, NULL,
        &pixelShaderBuffer, &errorMessage, NULL);
    if (FAILED(result))
    {
        if (errorMessage)
        {
            OutputShaderErrorMessage(errorMessage, hwnd, psFileName);
        }
        else
        {
            MessageBox(hwnd, psFileName, "Missing Shader File", MB_OK);
        }
        return false;
    }
    result = device->CreateVertexShader(vertexShaderBuffer->GetBufferPointer(), vertexShaderBuffer->GetBufferSize(), NULL,
        &m_vertexShader);
    if (FAILED(result))
    {
        return false;
    }
    result = device->CreatePixelShader(pixelShaderBuffer->GetBufferPointer(), pixelShaderBuffer->GetBufferSize(), NULL,
        &m_pixelShader);
    if (FAILED(result))
    {
        return false;
    }

    // This setup needs to match the VertexType structure in the ModelClass and in the shader.
    polygonLayout[0].SemanticName = "POSITION";
    polygonLayout[0].SemanticIndex = 0;
    polygonLayout[0].Format = DXGI_FORMAT_R32G32B32_FLOAT;
    polygonLayout[0].InputSlot = 0;
    polygonLayout[0].AlignedByteOffset = 0;
    polygonLayout[0].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
    polygonLayout[0].InstanceDataStepRate = 0;
    polygonLayout[1].SemanticName = "TEXCOORD";
    polygonLayout[1].SemanticIndex = 0;
    polygonLayout[1].Format = DXGI_FORMAT_R32G32_FLOAT;
    polygonLayout[1].InputSlot = 0;
    polygonLayout[1].AlignedByteOffset = D3D11_APPEND_ALIGNED_ELEMENT;
    polygonLayout[1].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
    polygonLayout[1].InstanceDataStepRate = 0;
    polygonLayout[2].SemanticName = "NORMAL";
    polygonLayout[2].SemanticIndex = 0;
    polygonLayout[2].Format = DXGI_FORMAT_R32G32B32_FLOAT;
    polygonLayout[2].InputSlot = 0;
    polygonLayout[2].AlignedByteOffset = D3D11_APPEND_ALIGNED_ELEMENT;
    polygonLayout[2].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
    polygonLayout[2].InstanceDataStepRate = 0;
    polygonLayout[3].SemanticName = "COLOR";
    polygonLayout[3].SemanticIndex = 0;
    polygonLayout[3].Format = DXGI_FORMAT_R32G32B32_FLOAT;
    polygonLayout[3].InputSlot = 0;
    polygonLayout[3].AlignedByteOffset = D3D11_APPEND_ALIGNED_ELEMENT;
    polygonLayout[3].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
    polygonLayout[3].InstanceDataStepRate = 0;
    numElements = sizeof(polygonLayout) / sizeof(polygonLayout[0]);
    result = device->CreateInputLayout(polygonLayout, numElements, vertexShaderBuffer->GetBufferPointer(),
        vertexShaderBuffer->GetBufferSize(), &m_layout);
    if (FAILED(result))
    {
        return false;
    }
    vertexShaderBuffer->Release();
    vertexShaderBuffer = nullptr;
    pixelShaderBuffer->Release();
    pixelShaderBuffer = nullptr;
    //Continues
When the code reaches polygonLayout[3].InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;, the buffer is still set as normal. But after that line (polygonLayout[3].InstanceDataStepRate = 0;) the value goes null for no apparent reason, with the
result = device->CreateInputLayout(polygonLayout, numElements, vertexShaderBuffer->GetBufferPointer(),
    vertexShaderBuffer->GetBufferSize(), &m_layout);
line throwing the exception.
I tried searching online with no results. Any help is much appreciated. Thanks in advance.
The polygonLayout array contains only 3 items, so when you fill polygonLayout[3] you produce a buffer overrun and face undefined behavior (potentially corrupting other variables stored on the stack, which is why vertexShaderBuffer suddenly becomes null). It would be a good idea to 1) make it contain 4 items; 2) use an array wrapper with a (debug) indexing check:
::std::array<D3D11_INPUT_ELEMENT_DESC, 4> polygonLayout;
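For illustration, a sketch of the two suggestions combined, using the names from the question; only the fourth element and the CreateInputLayout call are shown, and the earlier elements are filled exactly as before:

#include <array>

// Four elements instead of three; the element count now comes from the array itself.
::std::array<D3D11_INPUT_ELEMENT_DESC, 4> polygonLayout = {};

// ... fill polygonLayout[0]..polygonLayout[2] as in the original code, then:
polygonLayout.at(3).SemanticName = "COLOR"; // .at() throws std::out_of_range on a bad index
polygonLayout.at(3).SemanticIndex = 0;
polygonLayout.at(3).Format = DXGI_FORMAT_R32G32B32_FLOAT;
polygonLayout.at(3).InputSlot = 0;
polygonLayout.at(3).AlignedByteOffset = D3D11_APPEND_ALIGNED_ELEMENT;
polygonLayout.at(3).InputSlotClass = D3D11_INPUT_PER_VERTEX_DATA;
polygonLayout.at(3).InstanceDataStepRate = 0;

numElements = static_cast<unsigned int>(polygonLayout.size());
result = device->CreateInputLayout(polygonLayout.data(), numElements, vertexShaderBuffer->GetBufferPointer(),
    vertexShaderBuffer->GetBufferSize(), &m_layout);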

Get Raw Pixel Data with DirectXTK

I would like to access the pixel buffer data of a dirty rect after calling AcquireNextFrame from DXGI.
Here is my code to get the pixel buffer data from an ID3D11Texture2D:
BYTE* DISPLAYMANAGER::GetImageData(ID3D11Texture2D* texture2D, D3D11_TEXTURE2D_DESC Desc)
{
    if (texture2D != NULL)
    {
        D3D11_TEXTURE2D_DESC description;
        texture2D->GetDesc(&description);
        description.BindFlags = 0;
        description.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
        description.Usage = D3D11_USAGE_STAGING;
        description.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
        ID3D11Texture2D* texTemp = NULL;
        HRESULT hr = m_Device->CreateTexture2D(&description, NULL, &texTemp);
        if (FAILED(hr))
        {
            if (texTemp)
            {
                texTemp->Release();
                texTemp = NULL;
            }
            return NULL;
        }
        m_DeviceContext->CopyResource(texTemp, texture2D);
        D3D11_MAPPED_SUBRESOURCE mapped;
        unsigned int subresource = D3D11CalcSubresource(0, 0, 0);
        hr = m_DeviceContext->Map(texTemp, subresource, D3D11_MAP_READ_WRITE, 0, &mapped);
        if (FAILED(hr))
        {
            texTemp->Release();
            texTemp = NULL;
            return NULL;
        }
        unsigned char *captureData = new unsigned char[Desc.Width * Desc.Height * 4];
        RtlZeroMemory(captureData, Desc.Width * Desc.Height * 4);
        const int pitch = mapped.RowPitch;
        unsigned char *source = static_cast<unsigned char*>(mapped.pData);
        unsigned char *dest = captureData;
        for (int i = 0; i < Desc.Height; i++) {
            memcpy(captureData, source, Desc.Width * 4);
            source += pitch;
            captureData += Desc.Width * 4;
        }
        for (int i = 0; i < Desc.Width * Desc.Height * 4; i++) {
            //trace(L"Pixel[%d] = %x\n", i, dest[i]);
        }
        m_DeviceContext->Unmap(texTemp, 0);
        return dest;
    }
    else
        return NULL;
}
The output of dest is full of '0' values; even mapped.pData is full of '0', and I don't know why.
Here is where I call my function GetImageData:
DUPL_RETURN DISPLAYMANAGER::ProcessFrame(_In_ FRAME_DATA* Data, _Inout_ ID3D11Texture2D* SharedSurf, INT OffsetX, INT OffsetY, _In_ DXGI_OUTPUT_DESC* DeskDesc)
{
    DUPL_RETURN Ret = DUPL_RETURN_SUCCESS;
    // Process dirties and moves
    if (Data->FrameInfo.TotalMetadataBufferSize)
    {
        D3D11_TEXTURE2D_DESC Desc;
        Data->Frame->GetDesc(&Desc);
        if (Data->MoveCount)
        {
            Ret = CopyMove(SharedSurf, reinterpret_cast<DXGI_OUTDUPL_MOVE_RECT*>(Data->MetaData), Data->MoveCount, OffsetX, OffsetY, DeskDesc, Desc.Width, Desc.Height);
            if (Ret != DUPL_RETURN_SUCCESS)
            {
                return Ret;
            }
        }
        if (Data->DirtyCount)
        {
            Ret = CopyDirty(Data->Frame, SharedSurf, reinterpret_cast<RECT*>(Data->MetaData + (Data->MoveCount * sizeof(DXGI_OUTDUPL_MOVE_RECT))), Data->DirtyCount, OffsetX, OffsetY, DeskDesc, Desc);
            GetImageData(Data->Frame, Desc); // here I would like to extract the dirty rect pixel buffer (BGRA values)
        }
    }
    return Ret;
}
Is it possible to do this? I got this code from the Microsoft Desktop Duplication API sample. Thanks!
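Side note, since the title mentions DirectXTK: for sanity-checking what Data->Frame actually contains, DirectXTK's ScreenGrab helper can write the texture straight to an image file, handling the staging copy and row pitch internally. A minimal sketch, assuming the DirectXTK headers are available and COM is initialized; the file name and placement are just an example:

#include <wincodec.h>   // GUID_ContainerFormatPng
#include "ScreenGrab.h"  // DirectX::SaveWICTextureToFile from DirectXTK

// Dump the duplicated frame to a PNG for inspection (e.g. right after the CopyDirty call above).
HRESULT hrDump = DirectX::SaveWICTextureToFile(m_DeviceContext, Data->Frame,
                                               GUID_ContainerFormatPng, L"frame.png");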

How to add audio to an mp4 file created with a SinkWriter?

I want to create an mp4 file with a SinkWriter.
I was able to produce the video by feeding samples as in the MSDN examples, but I could not get the audio created.
So I searched and found the source below, but I cannot hear any audio with it either.
Is this source wrong? If so, where is the problem?
Here is the code I found:
//Audio constants
const GUID AUDIO_MAJOR_TYPE = MFMediaType_Audio; //for audio treatment
const GUID AUDIO_ENCODING_FORMAT = MFAudioFormat_AAC; //encoding format (output)
const GUID AUDIO_INPUT_FORMAT = MFAudioFormat_PCM; //input format
const UINT32 AUDIO_SAMPLES_PER_SECOND = 44100; //samples per second
const UINT32 AUDIO_AVG_BYTES_PER_SECOND = 16000; //average bytes per second
const UINT32 AUDIO_NUM_CHANNELS = 1; //MONO or STEREO
const UINT32 AUDIO_BITS_PER_SAMPLE = 16; //bits per sample
const UINT32 AUDIO_ONE_SECOND = 10; //quantity of buffers per second
const UINT32 AUDIO_BUFFER_LENGTH = AUDIO_BITS_PER_SAMPLE / 8 * AUDIO_NUM_CHANNELS * AUDIO_SAMPLES_PER_SECOND; //max. buffer size
const LONGLONG AUDIO_SAMPLE_DURATION = 10000000; //sample duration
//Creation of a template to release pointers
template <class T> void SafeRelease(T **ppT)
{
    if (*ppT)
    {
        (*ppT)->Release();
        *ppT = NULL;
    }
}
//Creation of the Byte Stream
IMFByteStream* CreateFileByteStream(LPCWSTR FileName)
{
    //create file byte stream
    IMFByteStream *pByteStream = NULL;
    HRESULT hr = MFCreateFile(MF_ACCESSMODE_WRITE, MF_OPENMODE_DELETE_IF_EXIST, MF_FILEFLAGS_NONE, FileName, &pByteStream);
    if (FAILED(hr))
        pByteStream = NULL;
    return pByteStream;
}
//Creation of the Video profile (H264)
IMFMediaType* CreateVideoProfile()
//Creation of the Audio profile (AAC)
IMFMediaType* CreateAudioProfile()
{
    IMFMediaType *pMediaType = NULL;
    HRESULT hr = MFCreateMediaType(&pMediaType);
    if (SUCCEEDED(hr))
    {
        hr = pMediaType->SetGUID(MF_MT_MAJOR_TYPE, AUDIO_MAJOR_TYPE);
    }
    if (SUCCEEDED(hr))
    {
        hr = pMediaType->SetGUID(MF_MT_SUBTYPE, AUDIO_ENCODING_FORMAT);
    }
    if (SUCCEEDED(hr))
    {
        hr = pMediaType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, AUDIO_BITS_PER_SAMPLE);
    }
    if (SUCCEEDED(hr))
    {
        hr = pMediaType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, AUDIO_SAMPLES_PER_SECOND);
    }
    if (SUCCEEDED(hr))
    {
        hr = pMediaType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, AUDIO_NUM_CHANNELS);
    }
    if (SUCCEEDED(hr))
    {
        hr = pMediaType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, AUDIO_AVG_BYTES_PER_SECOND);
    }
    if (FAILED(hr))
        pMediaType = NULL;
    return pMediaType;
}
//Create an aggregate source (both audio and video)
IMFMediaSource* CreateAggregatedSource(IMFMediaSource *pSource1, IMFMediaSource *pSource2, IMFMediaSource *pAggSource)
{
    pAggSource = NULL;
    IMFCollection *pCollection = NULL;
    HRESULT hr = MFCreateCollection(&pCollection);
    if (SUCCEEDED(hr))
    {
        hr = pCollection->AddElement(pSource1);
    }
    if (SUCCEEDED(hr))
    {
        hr = pCollection->AddElement(pSource2);
    }
    if (SUCCEEDED(hr))
    {
        hr = MFCreateAggregateSource(pCollection, &pAggSource);
    }
    SafeRelease(&pCollection);
    if (FAILED(hr))
        pAggSource = NULL;
    return pAggSource;
}
//Creation of the MPEG-4 MediaSink
IMFMediaSink* CreateMediaSink(IMFByteStream *pByteStream, IMFMediaType *pVideoMediaType, IMFMediaType *pAudioMediaType)
{
    IMFMediaSink *pMediaSink = NULL;
    DWORD pdwCharac = NULL;
    DWORD pdwStreamCount = NULL;
    HRESULT hr = MFCreateMPEG4MediaSink(pByteStream, pVideoMediaType, pAudioMediaType, &pMediaSink);
    //// DEBUG ////
    pMediaSink->GetCharacteristics(&pdwCharac);
    pMediaSink->GetStreamSinkCount(&pdwStreamCount);
    if (FAILED(hr))
        pMediaSink = NULL;
    return pMediaSink;
}
IMFAttributes* CreateAttributesForSinkWriter()
{
    IMFAttributes *pMFAttributes = NULL;
    HRESULT hr = MFCreateAttributes(&pMFAttributes, 100);
    if (SUCCEEDED(hr))
    {
        hr = pMFAttributes->SetGUID(MF_TRANSCODE_CONTAINERTYPE, MFTranscodeContainerType_MPEG4);
    }
    if (SUCCEEDED(hr))
    {
        hr = pMFAttributes->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, FALSE); //no hardware encoding
    }
    if (SUCCEEDED(hr))
    {
        hr = pMFAttributes->SetUINT32(MF_READWRITE_DISABLE_CONVERTERS, FALSE); //enable converting formats
    }
    if (FAILED(hr))
        pMFAttributes = NULL;
    return pMFAttributes;
}
//Initialization of the Video SinkWriter...
HRESULT InitializeSinkWriterVideo(IMFSinkWriter **ppWriter, DWORD *pStreamIndex, IMFMediaSink *pMediaSink)
//Initialization of the Audio SinkWriter...
HRESULT InitializeSinkWriterAudio(IMFSinkWriter **ppWriter, DWORD *pStreamIndex, IMFMediaSink *pMediaSink)
{
    *ppWriter = NULL;
    *pStreamIndex = NULL;
    IMFSinkWriter *pSinkWriter = NULL;
    IMFMediaType *pMediaTypeOut = NULL;
    IMFMediaType *pMediaTypeIn = NULL;
    IMFAttributes *pAttrib = NULL;
    DWORD streamIndex = 1;
    HRESULT hr = MFCreateSinkWriterFromMediaSink(pMediaSink, NULL, &pSinkWriter);
    //input : audio
    if (SUCCEEDED(hr))
    {
        hr = MFCreateMediaType(&pMediaTypeIn);
    }
    if (SUCCEEDED(hr))
    {
        hr = pMediaTypeIn->SetGUID(MF_MT_MAJOR_TYPE, AUDIO_MAJOR_TYPE);
    }
    if (SUCCEEDED(hr))
    {
        hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, AUDIO_INPUT_FORMAT);
    }
    if (SUCCEEDED(hr))
    {
        hr = pMediaTypeIn->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, AUDIO_BITS_PER_SAMPLE);
    }
    if (SUCCEEDED(hr))
    {
        hr = pMediaTypeIn->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, AUDIO_SAMPLES_PER_SECOND);
    }
    if (SUCCEEDED(hr))
    {
        hr = pMediaTypeIn->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, AUDIO_NUM_CHANNELS);
    }
    if (SUCCEEDED(hr))
    {
        hr = pSinkWriter->SetInputMediaType(streamIndex, pMediaTypeIn, NULL);
    }
    //Tell the Audio SinkWriter to begin data treatment
    if (SUCCEEDED(hr))
    {
        hr = pSinkWriter->BeginWriting();
    }
    //Possible error codes
    if (FAILED(hr))
    {
        if (hr == MF_E_INVALIDMEDIATYPE)
            UINT32 uiShutDown = 0;
        if (hr == MF_E_INVALIDSTREAMNUMBER)
            UINT32 uiShutDown = 1;
        if (hr == MF_E_TOPO_CODEC_NOT_FOUND)
            UINT32 uiShutDown = 2;
    }
    //Returns the pointer of the caller
    if (SUCCEEDED(hr))
    {
        *ppWriter = pSinkWriter;
        (*ppWriter)->AddRef();
        *pStreamIndex = streamIndex;
    }
    //Release pointers
    SafeRelease(&pSinkWriter);
    SafeRelease(&pMediaTypeOut);
    SafeRelease(&pMediaTypeIn);
    SafeRelease(&pAttrib);
    return hr;
}
//Write a video frame
HRESULT WriteVideoFrame(IMFSinkWriter *pWriter, DWORD streamIndex, const LONGLONG& rtStart, const LONGLONG& rtDuration)
//Write an audio packet
HRESULT WriteAudioPacket(IMFSinkWriter *pWriter, DWORD streamIndex, const LONGLONG& rtStart, const LONGLONG& rtDuration, UINT32 Quantity)
{
    IMFSample *pSample = NULL;
    IMFMediaBuffer *pBuffer = NULL;
    const DWORD cbBuffer = Quantity * 2;
    BYTE *pData = NULL;
    //Create a new memory buffer, whose max. size is cbBuffer (QuantityOfSamplesPerVideoFrame * 2 Bytes)
    HRESULT hr = MFCreateMemoryBuffer(cbBuffer, &pBuffer);
    //Lock the buffer and copy the audio packet to the buffer
    if (SUCCEEDED(hr))
    {
        hr = pBuffer->Lock(&pData, NULL, NULL);
    }
    if (SUCCEEDED(hr))
    {
        for (DWORD n = 0; n < cbBuffer; n++)
        {
            pData[n] = (BYTE)(rand() & 0xFF); //generation of random noise
        }
    }
    if (SUCCEEDED(hr))
    {
        hr = pBuffer->Unlock();
    }
    // Set the data length of the buffer
    if (SUCCEEDED(hr))
    {
        hr = pBuffer->SetCurrentLength(cbBuffer);
    }
    //Create a media sample and add the buffer to the sample
    if (SUCCEEDED(hr))
    {
        hr = MFCreateSample(&pSample);
    }
    if (SUCCEEDED(hr))
    {
        hr = pSample->AddBuffer(pBuffer);
    }
    //Set the time stamp and the duration
    if (SUCCEEDED(hr))
    {
        hr = pSample->SetSampleTime(rtStart);
    }
    if (SUCCEEDED(hr))
    {
        hr = pSample->SetSampleDuration(rtDuration);
    }
    //Send the sample to the Sink Writer
    if (SUCCEEDED(hr))
    {
        hr = pWriter->WriteSample(streamIndex, pSample);
    }
    //Release pointers
    SafeRelease(&pSample);
    SafeRelease(&pBuffer);
    return hr;
}
// MAIN FUNCTION
void main()
{
    HRESULT hr = S_OK;
    IMFByteStream *spByteStream = NULL;
    IMFMediaSink *pMediaSink = NULL;
    IMFSinkWriter *spSinkWriterVid = NULL;
    IMFSinkWriter *spSinkWriterAud = NULL;
    IMFMediaType *spVideo = NULL;
    IMFMediaType *spAudio = NULL;
    //IMFMediaEventGenerator *spMFMediaEvtGene = NULL;
    //IMFMediaEvent *spMFMediaEvent = NULL;
    IMFAttributes *spAttrib = NULL;
    DWORD sindexVid = 0, sindexAud = 0, j = 0;
    LPCWSTR str = L"outputfile.mp4";
    hr = CoInitialize(NULL);
    if (SUCCEEDED(hr))
    {
        hr = MFStartup(MF_VERSION);
        if (SUCCEEDED(hr))
        {
            spByteStream = CreateFileByteStream(str);
            if (spByteStream != NULL)
            {
                spVideo = CreateVideoProfile();
            }
            if (spVideo != NULL)
            {
                spAudio = CreateAudioProfile();
            }
            if (spAudio != NULL)
            {
                pMediaSink = CreateMediaSink(spByteStream, spVideo, spAudio);
            }
            if (pMediaSink != NULL)
            {
                hr = InitializeSinkWriterVideo(&spSinkWriterVid, &sindexVid, pMediaSink);
                if (SUCCEEDED(hr))
                {
                    LONGLONG rtStartVid = 0;
                    UINT64 rtDurationVid = 0;
                    /********************************************************
                    * VIDEO PART *
                    ********************************************************/
                    //Calculate the average time per frame, for video
                    //MFFrameRateToAverageTimePerFrame(VIDEO_FPS, 1, &rtDurationVid);
                    //loop to treat all the pictures
                    for (DWORD i = 0; i < VIDEO_FRAME_COUNT; ++i, ++j)
                    {
                        //Picture pixels
                        for (DWORD k = 0; k < VIDEO_PELS; k++)
                        {
                            if (j > 255)
                                j = 0;
                            videoFrameBuffer[k] = ((j << 16) & 0x00FF0000) | ((j << 8) & 0x0000FF00) | (j & 0x000000FF);
                        }
                        hr = WriteVideoFrame(spSinkWriterVid, sindexVid, rtStartVid, rtDurationVid);
                        if (FAILED(hr))
                        {
                            break;
                        }
                        //Update the time stamp value
                        rtStartVid += rtDurationVid;
                    }
                    //Finalization of writing with the Video SinkWriter
                    if (SUCCEEDED(hr))
                    {
                        hr = spSinkWriterVid->Finalize();
                    }
                }
            }
            SafeRelease(&spVideo);
            SafeRelease(&spSinkWriterVid);
            if (SUCCEEDED(hr))
            {
                hr = InitializeSinkWriterAudio(&spSinkWriterAud, &sindexAud, pMediaSink);
                if (SUCCEEDED(hr))
                {
                    LONGLONG rtStartAud = 0;
                    UINT64 rtDurationAud;
                    double QtyAudioSamplesPerVideoFrame = 0;
                    //Calculate the approximate quantity of samples, according to a video frame duration
                    //44100 Hz -> 1 s
                    //????? Hz -> 0.04 s (= 40 ms = one video frame duration)
                    if (VIDEO_FPS != 0)
                        QtyAudioSamplesPerVideoFrame = ((double)AUDIO_SAMPLES_PER_SECOND / (double)VIDEO_FPS);
                    else
                        QtyAudioSamplesPerVideoFrame = 0;
                    MFFrameRateToAverageTimePerFrame(VIDEO_FPS, 1, &rtDurationAud); //we treat the same duration as the video
                    //it means that we will treat N audio packets for the last of one picture (=40 ms)
                    //loop to treat all the audio packets
                    if (rtDurationAud != 0)
                    {
                        for (DWORD i = 0; i < VIDEO_FRAME_COUNT; ++i)
                        {
                            //Audio packets
                            hr = WriteAudioPacket(spSinkWriterAud, sindexAud, rtStartAud, rtDurationAud, (UINT32)QtyAudioSamplesPerVideoFrame);
                            if (FAILED(hr))
                            {
                                break;
                            }
                            //Update the time stamp value
                            rtStartAud += rtDurationAud;
                        }
                        //Finalization of writing with the Audio SinkWriter
                        if (SUCCEEDED(hr))
                        {
                            hr = spSinkWriterAud->Finalize();
                        }
                    }
                }
            }
            //Release pointers
            SafeRelease(&spByteStream);
            SafeRelease(&spAudio);
            SafeRelease(&spSinkWriterAud);
            SafeRelease(&spAttrib);
            //Shutdown the MediaSink (not done by the SinkWriter)
            pMediaSink->Shutdown();
            SafeRelease(&pMediaSink);
        }
        //Shutdown MediaFoundation
        MFShutdown();
        CoUninitialize();
    }
    //CDialog::OnOK();
}
Reference: "How to add audio data to a video file created by a SinkWriter?" on MSDN Forums
On the existing IMFSinkWriter you already have for video (e.g. from the MSDN tutorial code), you call AddStream and SetInputMediaType to initialize an additional stream for audio, which results in an audio track in the produced MP4 file. You obtain a separate stream index for audio, which you then use in subsequent calls. Just as with WriteSample for video, you call WriteSample for audio with the respective stream index and feed it audio data for encoding/writing in the main loop of your application.
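To make that concrete, here is a minimal sketch of that approach, reusing the constants from the question. The helper name AddAudioStream and the surrounding setup (a pSinkWriter created for video as in the MSDN tutorial, with this called before BeginWriting) are assumptions for illustration, not part of the original code:

HRESULT AddAudioStream(IMFSinkWriter *pSinkWriter, DWORD *pAudioStreamIndex)
{
    IMFMediaType *pOut = NULL; //what ends up in the MP4 (AAC)
    IMFMediaType *pIn = NULL;  //what the application feeds in (PCM)

    //Output type: AAC with the parameters defined at the top of the question
    HRESULT hr = MFCreateMediaType(&pOut);
    if (SUCCEEDED(hr)) hr = pOut->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
    if (SUCCEEDED(hr)) hr = pOut->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_AAC);
    if (SUCCEEDED(hr)) hr = pOut->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, AUDIO_BITS_PER_SAMPLE);
    if (SUCCEEDED(hr)) hr = pOut->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, AUDIO_SAMPLES_PER_SECOND);
    if (SUCCEEDED(hr)) hr = pOut->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, AUDIO_NUM_CHANNELS);
    if (SUCCEEDED(hr)) hr = pOut->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, AUDIO_AVG_BYTES_PER_SECOND);
    if (SUCCEEDED(hr)) hr = pSinkWriter->AddStream(pOut, pAudioStreamIndex); //new audio stream; its index is returned here

    //Input type: uncompressed PCM with the same sample rate and channel count
    if (SUCCEEDED(hr)) hr = MFCreateMediaType(&pIn);
    if (SUCCEEDED(hr)) hr = pIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
    if (SUCCEEDED(hr)) hr = pIn->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM);
    if (SUCCEEDED(hr)) hr = pIn->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, AUDIO_BITS_PER_SAMPLE);
    if (SUCCEEDED(hr)) hr = pIn->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, AUDIO_SAMPLES_PER_SECOND);
    if (SUCCEEDED(hr)) hr = pIn->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, AUDIO_NUM_CHANNELS);
    if (SUCCEEDED(hr)) hr = pSinkWriter->SetInputMediaType(*pAudioStreamIndex, pIn, NULL);

    SafeRelease(&pOut);
    SafeRelease(&pIn);
    return hr;
}

Call this once on the same sink writer that owns the video stream, before BeginWriting. Afterwards, in the main loop, interleave pWriter->WriteSample(videoStreamIndex, ...) and pWriter->WriteSample(audioStreamIndex, ...) with increasing time stamps, and call Finalize only once, after both streams have been written.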