Cannot retrieve shader binary - c++

I'm having a problem retrieving a shader program's binary.
I made a class named "Building" that has the following static members:
static GLenum binary_format;
static unsigned char * program_binary;
static GLsizei binary_size;
static GLsizei binary_length;
I use program_binary to store the shader program's binary.
I made these members static so that, once the first instance fills them with proper values,
they can be retrieved from any instance of the class.
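For reference, static data members declared this way also need out-of-class definitions in exactly one .cpp file (at least before C++17's inline variables); a minimal sketch matching the declarations above:
GLenum Building::binary_format = 0;
unsigned char * Building::program_binary = nullptr;
GLsizei Building::binary_size = 0;
GLsizei Building::binary_length = 0;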
This class also has the following member function, which I use as a routine to set up the shader program:
void Building::setShaders()
{
    if(program_binary == nullptr)
    {
        shader_prog = glCreateProgram();
        GLchar const *vss[] =
        {
            *smanager.getVShaderSource(0),
            *smanager.getVShaderSource(1)
        };
        GLuint vs = glCreateShader(GL_VERTEX_SHADER);
        glShaderSource(vs, 2, vss, NULL);
        glCompileShader(vs);
        GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
        glShaderSource(fs, 1, smanager.getFShaderSource(0), NULL);
        glCompileShader(fs);
        // CHECKING COMPILE STATUS
        GLint vs_status;
        glGetShaderiv(vs, GL_COMPILE_STATUS, &vs_status);
        GLint fs_status;
        glGetShaderiv(fs, GL_COMPILE_STATUS, &fs_status);
        if(vs_status == 0 || fs_status == 0)
        {
            if(vs_status == 0)
            {
                GLint log_length;
                glGetShaderiv(vs, GL_INFO_LOG_LENGTH, &log_length);
                char log[log_length];
                glGetShaderInfoLog(vs, log_length, NULL, log);
                printVShaderLog(log);
            }
            if(fs_status == 0)
            {
                GLint log_length;
                glGetShaderiv(fs, GL_INFO_LOG_LENGTH, &log_length);
                char log[log_length];
                glGetShaderInfoLog(fs, log_length, NULL, log);
                printFShaderLog(log);
            }
        }
        else
        {
            glAttachShader(shader_prog, vs);
            glAttachShader(shader_prog, fs);
            glProgramParameteri(shader_prog, GL_PROGRAM_BINARY_RETRIEVABLE_HINT, GL_TRUE);
            glLinkProgram(shader_prog);
            GLint param;
            glGetProgramiv(shader_prog, GL_LINK_STATUS, &param);
            if(param)
            {
                printShaderLinkState(param, NULL);
                glGetProgramiv(shader_prog, GL_PROGRAM_BINARY_LENGTH, &binary_size);
                std::cerr << "binary_size is " << binary_size << std::endl;
                // binary_size is zero!
                program_binary = new unsigned char[binary_size];
                glGetProgramBinary(shader_prog, binary_size, &binary_length, &binary_format, program_binary);
            }
            else
            {
                GLint logLength;
                glGetProgramiv(shader_prog, GL_INFO_LOG_LENGTH, &logLength);
                char log[logLength];
                glGetProgramInfoLog(shader_prog, logLength, NULL, log);
                printShaderLinkState(param, log);
            }
        }
        glDeleteShader(vs);
        glDeleteShader(fs);
    } // if(program_binary == nullptr)
    else
        glProgramBinary(shader_prog, binary_format, program_binary, binary_length);
}
The problem is that the length reported by glGetProgramiv(shader_prog, GL_PROGRAM_BINARY_LENGTH, &binary_size);
is zero. However, I tried setting binary_size = 100 just to see whether anything gets written through the program_binary pointer, but glGetProgramBinary(shader_prog, binary_size, &binary_length, &binary_format, program_binary); isn't working either; program_binary simply remains empty. I have no clue why this is happening.
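One diagnostic worth running first (a sketch, assuming a context with OpenGL 4.1 or ARB_get_program_binary): drivers are allowed to expose zero program binary formats, and in that case there is simply no binary to retrieve. Setting GL_PROGRAM_BINARY_RETRIEVABLE_HINT before glLinkProgram, as the code above already does, is correct.
GLint num_formats = 0;
glGetIntegerv(GL_NUM_PROGRAM_BINARY_FORMATS, &num_formats);
std::cerr << "program binary formats supported: " << num_formats << std::endl;
// If this prints 0, the driver offers no binary format at all.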

Related

DirectX 11 Shader Reflection: Need help setting Constant Buffers variables by name

I'm creating a system that allows the manipulation of constant buffer variables by name, using their byte offsets and byte sizes obtained via shader reflection. I can bind the buffers to the device context just fine, but none of the cubes in my 3D scene are showing up. I believe this is because something is wrong with how I'm mapping data to the constant buffers. I cast the struct, be it a float4 or a matrix, to a void pointer, and then copy that void pointer's data to another structure that carries the variable's name. Once the shader needs its buffers updated before a draw call, I copy the data of every structure in the list during the Map/Unmap call, iterating over the list. There is also a crash whenever the program calls the destructor on one of the shader constant structures. Below is the code I've written so far:
I'm mapping the buffer data through this algorithm here:
void DynamicConstantBuffer::UpdateChanges(ID3D11DeviceContext* pDeviceContext)
{
    D3D11_MAPPED_SUBRESOURCE mappedResource;
    HRESULT hr = pDeviceContext->Map(m_Buffer.Get(), 0, D3D11_MAP_WRITE_DISCARD, 0, &mappedResource);
    if (FAILED(hr)) return;
    // Set mapped data
    for (const auto& constant : m_BufferConstants)
    {
        char* startPosition = static_cast<char*>(mappedResource.pData) + constant.desc.StartOffset;
        memcpy(startPosition, constant.pData, sizeof(constant.pData));
    }
    // Copy memory and unmap
    pDeviceContext->Unmap(m_Buffer.Get(), 0);
}
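Note that sizeof(constant.pData) evaluates to the size of the pointer itself (4 or 8 bytes), not the size of the variable's data, so at most a pointer's worth of bytes ever reaches the mapped buffer; the same pattern appears in SetConstantVariable() below. A sketch of the presumably intended copies, using the sizes already captured from the reflection data:
memcpy(startPosition, constant.pData, constant._size);  // in UpdateChanges()
memcpy(v.pData, varData, v._size);                      // in SetConstantVariable()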
I'm initializing the Constant Buffer here:
BOOL DynamicConstantBuffer::Initialize(UINT nBufferSlot, ID3D11Device* pDevice, ID3D11ShaderReflection* pShaderReflection)
{
    ID3D11ShaderReflectionConstantBuffer* pReflectionBuffer = NULL;
    D3D11_SHADER_BUFFER_DESC cbShaderDesc = {};
    // Fetch constant buffer description
    if (!(pReflectionBuffer = pShaderReflection->GetConstantBufferByIndex(nBufferSlot)))
        return FALSE;
    // Get description
    pReflectionBuffer->GetDesc(&cbShaderDesc);
    m_BufferSize = cbShaderDesc.Size;
    // Create constant buffer on gpu end
    D3D11_BUFFER_DESC cbDescription = {};
    cbDescription.Usage = D3D11_USAGE_DYNAMIC;
    cbDescription.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
    cbDescription.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
    cbDescription.ByteWidth = cbShaderDesc.Size;
    if (FAILED(pDevice->CreateBuffer(&cbDescription, NULL, m_Buffer.GetAddressOf())))
        return FALSE;
    // Poll shader variables
    for (UINT i = 0; i < cbShaderDesc.Variables; i++)
    {
        ID3D11ShaderReflectionVariable* pVariable = NULL;
        pVariable = pReflectionBuffer->GetVariableByIndex(i);
        // Get variable description
        D3D11_SHADER_VARIABLE_DESC variableDesc = {};
        pVariable->GetDesc(&variableDesc);
        // Push variable back into list of variables
        m_BufferConstants.push_back(ShaderConstant(variableDesc));
    }
    return TRUE;
}
Here's the method that sets a variable within the constant buffer:
BOOL DynamicConstantBuffer::SetConstantVariable(const std::string& varName, const void* varData)
{
    for (auto& v : m_BufferConstants)
    {
        if (v.desc.Name == varName)
        {
            memcpy(v.pData, varData, sizeof(varData));
            bBufferDirty = TRUE;
            return TRUE;
        }
    }
    // No variable to assign :(
    return FALSE;
}
Here's the class layout for ShaderConstant:
class ShaderConstant
{
public:
    ShaderConstant(D3D11_SHADER_VARIABLE_DESC& desc)
    {
        this->desc = desc;
        pData = new char[desc.Size];
        _size = desc.Size;
    };
    ~ShaderConstant()
    {
        if (!pData)
            return;
        delete[] pData;
        pData = NULL;
    }
    D3D11_SHADER_VARIABLE_DESC desc;
    void* pData;
    size_t _size;
};
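Incidentally, the destructor crash is consistent with ShaderConstant violating the rule of three: push_back() copies the object into the vector, the compiler-generated copy constructor duplicates the raw pData pointer, and both copies eventually delete[] the same allocation. A minimal sketch of a deep copy, keeping the class otherwise as above:
ShaderConstant(const ShaderConstant& other)
    : desc(other.desc), _size(other._size)
{
    pData = new char[_size];
    memcpy(pData, other.pData, _size);
}
Alternatively, storing the bytes in a std::vector<char> instead of a raw new[] allocation sidesteps the manual copy and delete entirely.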
Any help at all would be appreciated. Thank you.

OpenGL showing blank screen. Maybe due to shaders?

My OpenGL ES application isn't working. I'm using SDL for window management, and it holds the context. After looking around, I noticed that both the vertex shader and the fragment shader showed up as 0 in the debugger. Even the program was 0. Could this be the reason? I modeled my shader compiling and linking code on a template that was previously made.
If it is, what is wrong? Here is the code:
GLuint ShaderHelper::compileShader(GLenum type, std::string fileName) {
    std::string fileContents;
    std::ifstream fin;
    std::string path;
    // Getting the necessary path...
    // These are abstractions for getting file contents
    // from the main bundle.
    if (type == GL_VERTEX_SHADER) {
        FileOpener opener;
        path = opener.retriveFileFromBundle(fileName, "vsh");
    } else if (type == GL_FRAGMENT_SHADER) {
        FileOpener opener;
        path = opener.retriveFileFromBundle(fileName, "fsh");
    } else {
        std::cout << "ERROR: Invalid shader type at filename " << fileName << std::endl;
        exit(1);
    }
    fin.open(path);
    if (!fin.is_open()) {
        std::cout << "ERROR: Failed to open file " << fileName << std::endl;
        exit(1);
    }
    // Retrieving the string from the file...
    while (!fin.eof()) {
        char CLine[255];
        fin.getline(CLine, 255);
        std::string line = CLine;
        fileContents = fileContents + line;
        fileContents = fileContents + " \n";
    }
    fin.close();
    // I'm creating these variables because a pointer is needed for
    // glShaderSource
    GLuint shaderHandle = glCreateShader(type);
    const GLint shaderStringLength = (GLint)fileContents.size();
    const GLchar *shaderCString = fileContents.c_str();
    glShaderSource(shaderHandle, 1, &shaderCString, &shaderStringLength);
    glCompileShader(shaderHandle);
    return shaderHandle;
}
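As an aside, looping on !fin.eof() is a classic pitfall: eof() only becomes true after a read has already failed, so the last buffer can be appended twice or an empty line can sneak in. A sketch of the usual idiom, which tests the read itself:
std::string line;
while (std::getline(fin, line)) {
    fileContents += line;
    fileContents += " \n";
}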
void ShaderHelper::linkProgram(std::vector<GLuint *> shaderArray) {
    program = glCreateProgram();
    for (int i = 0; i < shaderArray.size(); i++) {
        glAttachShader(program, *shaderArray[i]);
    }
    glLinkProgram(program);
}
void ShaderHelper::addUniform(uniform_t uniform) {
    std::string name = uniform.name;
    uniforms[name] = uniform;
    // With that step done, we need to assign the location...
    uniforms[name].location = glGetUniformLocation(program, uniforms[name].name.c_str());
}
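One more thing worth guarding in addUniform(): glGetUniformLocation() is only meaningful on a successfully linked program, and it returns -1 for names that do not exist or that the compiler optimized out. A small sketch of such a guard:
uniforms[name].location = glGetUniformLocation(program, uniforms[name].name.c_str());
if (uniforms[name].location == -1) {
    std::cout << "WARNING: uniform '" << name << "' not found or optimized out" << std::endl;
}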
EDIT: After suggestions, I ran my code through glGetError(). I fixed an error, but I still get a blank screen. I'm no longer getting 0 as my shader values. I set glClearColor to white, and the screen appears pure white. I adjusted numbers in the MV matrix and the projection matrix, but there's still nothing at all. I disabled face culling, but still nothing. Also, the shaders are compiling and linking fine. So now what?
The dreaded blank screen can be caused by a variety of problems:
Your context is not created correctly, or you're not presenting your scene properly. Does changing your clear color to something other than black show anything?
Your transformation matrix is wrong. How are you setting up the position of the "camera"? Are you using something like GLM to set up a matrix? If so, have a look at glm::perspective() and glm::lookAt(). Make sure you're passing the matrix to the shader and that you're using it to set gl_Position in your vertex shader.
The geometry you're trying to display is facing away from the viewer. Try glDisable(GL_CULL_FACE). If it works, reverse the order of your vertices.
An OpenGL call is failing somewhere. Make sure you check glGetError() after every call. I usually have something like the following:
#include <cassert>

struct gl_checker
{
    ~gl_checker()
    {
        const auto e = glGetError();
        assert(e == GL_NO_ERROR);
    }
};

template <class F>
inline auto gl_call(F f) -> decltype(f())
{
    gl_checker gc;
    return f();
}
which can be used like this:
gl_call([]{ glDisable(GL_CULL_FACE); });
Your shaders fail to compile or link. Have a look at glGetShaderiv() with GL_COMPILE_STATUS and glGetProgramiv() with GL_LINK_STATUS. If they report an error, have a look at glGetShaderInfoLog() and glGetProgramInfoLog().
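To make that last point concrete, a minimal checker might look like this (a sketch; checkShader is a hypothetical helper, assuming <iostream> and <vector> are included):
bool checkShader(GLuint shader)
{
    GLint status = GL_FALSE;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
    if (status == GL_TRUE)
        return true;
    GLint length = 0;
    glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &length);
    std::vector<char> log(length > 0 ? length : 1);
    glGetShaderInfoLog(shader, (GLsizei)log.size(), nullptr, log.data());
    std::cerr << log.data() << std::endl;
    return false;
}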
As for the partial code you provided, I see nothing strictly wrong with it. Providing the shaders and a smaller, complete program might help with finding the problem.

OpenGL error 1281 when trying to glUseProgram

Any ideas how I could debug this OpenGL error (1281) further?
I'm loading source from files, compiling, linking, and then trying to check for errors after glUseProgram.
In my object's draw method ..
log.info(gl2.glIsProgram(shaderProgram)); // true
gl2.glUseProgram(shaderProgram);
int error;
while ((error = gl2.glGetError()) != GL2.GL_NO_ERROR) {
    throw new RuntimeException("glUseProgram" + ": glError " + error);
}
Output ..
[13:38:08] INFO (IARectangle.java:99) - true
java.lang.RuntimeException: glUseProgram: glError 1281
This is how I load my shader source, from .glsl files ..
Vector<Integer> shaders = new Vector<Integer>();
try {
    shaders.add(compileSource(
            loadSource("shaders/vertexShader.glsl"),
            loadSource("shaders/fragmentShader.glsl")));
    return shaders;
} catch (Exception e) {
    e.printStackTrace();
    return shaders;
}

public String[] loadSource(String filename) {
    StringBuilder sb = new StringBuilder();
    try {
        InputStream is = getClass().getClassLoader().getResourceAsStream(filename);
        BufferedReader br = new BufferedReader(new InputStreamReader(is, "UTF-8"));
        String line;
        while ((line = br.readLine()) != null) {
            sb.append(line);
            sb.append('\n');
        }
        is.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return new String[] { sb.toString() };
}
public final int compileSource(final String[] vertexSource, final String[] fragmentSource) throws Exception {
    int vertexShaderProgram;
    int fragmentShaderProgram;
    int shaderProgram;
    // load vertexShader source, compile and verify
    vertexShaderProgram = gl2.glCreateShader(GL2.GL_VERTEX_SHADER);
    gl2.glShaderSource(vertexShaderProgram, 1, vertexSource, null, 0);
    gl2.glCompileShader(vertexShaderProgram);
    verifyCompile(gl2, vertexShaderProgram);
    // load fragmentShader source, compile and verify
    fragmentShaderProgram = gl2.glCreateShader(GL2.GL_FRAGMENT_SHADER);
    gl2.glShaderSource(fragmentShaderProgram, 1, fragmentSource, null, 0);
    gl2.glCompileShader(fragmentShaderProgram);
    verifyCompile(gl2, fragmentShaderProgram);
    shaderProgram = gl2.glCreateProgram();
    gl2.glAttachShader(shaderProgram, vertexShaderProgram);
    gl2.glAttachShader(shaderProgram, fragmentShaderProgram);
    gl2.glLinkProgram(shaderProgram);
    IntBuffer intBuffer = IntBuffer.allocate(1);
    gl2.glGetProgramiv(shaderProgram, GL2.GL_LINK_STATUS, intBuffer);
    if (intBuffer.get(0) != 1) {
        String infoLog = null;
        gl2.glGetProgramiv(shaderProgram, GL2.GL_INFO_LOG_LENGTH, intBuffer);
        int size = intBuffer.get(0);
        log.error("Program link error: ");
        if (size > 0) {
            ByteBuffer byteBuffer = ByteBuffer.allocate(size);
            gl2.getGL2().glGetProgramInfoLog(shaderProgram, size, intBuffer, byteBuffer);
            byte[] sizeBytes = new byte[size];
            byteBuffer.get(sizeBytes, 0, size);
            infoLog = new String(sizeBytes);
            log.error("info: " + infoLog);
        } else {
            log.error("Unknown");
        }
        System.exit(1);
        return shaderProgram;
    } else {
        return shaderProgram;
    }
}
Vertex shader source ..
#version 120
uniform mat4 uMVPMatrix;
attribute vec4 vPosition;
void main() {
    gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
    //gl_Position = uMVPMatrix * vPosition;
}
Fragment shader source ..
#version 120
uniform vec4 vColor;
void main() {
    gl_FragColor = vColor;
}
At first glance, I'm not sure why glGetError is returning an error code; 1281 corresponds to GL_INVALID_VALUE. But to answer your specific question of 'How can I debug this error further?', I do have a suggestion.
Change your draw code to this:
// Logging errors before the call to glUseProgram
int error;
while ((error = gl2.glGetError()) != GL2.GL_NO_ERROR) {
    log.info(error);
}
log.info(gl2.glIsProgram(shaderProgram)); // true
gl2.glUseProgram(shaderProgram);
while ((error = gl2.glGetError()) != GL2.GL_NO_ERROR) {
    throw new RuntimeException("glUseProgram" + ": glError " + error);
}
Note that the difference here is that we've added a block of code to log errors returned by glGetError before the call to glUseProgram. The reason is that the error does not necessarily originate in your call to glUseProgram. If you see the 1281 error logged by the code above, you can determine that it actually originates from an OpenGL call made before the glUseProgram call.
Take a look at the documentation for glGetError:
glGetError returns the value of the error flag. Each detectable error
is assigned a numeric code and symbolic name. When an error occurs,
the error flag is set to the appropriate error code value. No other
errors are recorded until glGetError is called, the error code is
returned, and the flag is reset to GL_NO_ERROR.
So, if one of your earlier OpenGL calls (perhaps something in your compileSource function, for example) recorded the 1281 error, and you did not call glGetError anywhere between that point and your call to glUseProgram, you cannot reasonably assume that the error actually originates from the glUseProgram call.
In summary, glGetError does not return the most recent error recorded by an OpenGL call. You need to call glGetError with more granularity in order to pinpoint where the error is originating. This will allow you to troubleshoot the issue you're having and determine exactly which OpenGL call is recording the error.

static variables in switch statements

I am working on a Keil project using C. In one of my files, I have defined some variables at the top as static. I use them in some functions in this file, and I also use them in a switch/case statement. For example:
At the top, where the variables are defined:
static uint8_t* imageData = 0;
static uint32_t height = 0;
static uint32_t width = 0;
static uint32_t stride = 0;
in the middle of the code:
switch (pMsg->MsgId)
{
case WM_CREATE:
{
    CAMERA_Init();
    AMProcessor *processor = new AMProcessor();
    struct block blocks[2] = {
        {2, 240, 160},
        {2, 160, 120}
    };
    processor->initBlocks(blocks, 2);
    stride = 480; //sample_image.width;
    processor->Initialize(480, 272, 480, InputTypes::CHROMA_MOBILE, InputTypes::RGB);
    BSP_LED_Toggle(LED3);
    while(1){
        const PictureOutput* picOut = processor->ProcessImage((uint8_t *)CAMERA_FRAME_BUFFER);
        break;
    }
}
and near the bottom I have a couple of functions that also use these variables.
However, I am getting warnings telling me that the four variables defined at the top are initialized but never referenced. If they are not static, I do not get this warning, but I do get a hard fault error (which I am trying to get rid of).
So my question is: why are these not referenced? It obviously has something to do with the static definition, but why would the static definition prevent them from being referenced?
For clarification, I am getting this message on all of them, even stride:
warning: #177-D: variable "imageData" was declared but never referenced
I have a function at the bottom that uses all of these variables; it looks like this:
bool ReadImageFromPgmFile(const char* pFileName, uint32_t &height, uint32_t &width, uint8_t*& ImgData) {
    if (pFileName == 0) {
        return false;
    };
    // read data from file
    if (strstr(pFileName, ".pgm") || strstr(pFileName, ".PGM")) {
        FILE *pPgmFile = fopen(pFileName, "r");
        if (pPgmFile == NULL) {
            fprintf(stderr, "Cannot open PGM file '%s'.\n", pFileName);
            return false;
        };
        char x = fgetc(pPgmFile);
        char y = fgetc(pPgmFile);
        if (x != 'P' || y != '5') {
            fprintf(stderr, "Invalid PGM file '%s'.\n", pFileName);
            return false;
        };
        uint32_t maxvalue;
        fscanf(pPgmFile, "%d", &width);
        fscanf(pPgmFile, "%d", &height);
        fscanf(pPgmFile, "%d", &maxvalue);
        if (maxvalue > 255) {
            fprintf(stderr, "File '%s' has incorrect format.\nOnly 8-bit PGMs are supported by this reader.\n", pFileName);
            return false;
        };
        ImgData = new uint8_t[width*height];
        memset(ImgData, 0, width*height);
        fgetc(pPgmFile); // skip new line character
        uint32_t nPixelsRead = fread(ImgData, 1, width * height, pPgmFile);
        fclose(pPgmFile);
        if (nPixelsRead != width * height) {
            fprintf(stderr, "PGM file '%s' does not contain all pixels.\n", pFileName);
            return false;
        };
        return true;
    }
    return false;
};
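Incidentally, the "%d" conversions expect an int*, but width, height, and maxvalue are uint32_t; this often happens to work, yet the matching specifier is SCNu32 from <cinttypes> (a sketch of the corrected reads):
#include <cinttypes>
fscanf(pPgmFile, "%" SCNu32, &width);
fscanf(pPgmFile, "%" SCNu32, &height);
fscanf(pPgmFile, "%" SCNu32, &maxvalue);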
The method declaration hides the static height and width variables.
bool ReadImageFromPgmFile(const char* pFileName,
uint32_t &height,
uint32_t &width,
uint8_t*& ImgData) {
Use of height and width in this method will refer to the local parameters and not the static variables. I cannot see any reference to imageData, though there is an ImgData parameter that is used.
The static keyword in this context gives the variables internal linkage, meaning they are only visible within the translation unit they are declared in. Removing static gives them external linkage, making them accessible to the whole program. The compiler is unable (or unwilling) to reason about the usage of such globals across translation units, so you do not get the warnings.
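Since the code uses new and reference parameters, it is effectively being compiled as C++, so a minimal sketch of the shadowing looks like this (setHeight is a hypothetical function, not from the code above):
static uint32_t height = 0;  // file scope, internal linkage

void setHeight(uint32_t height) {  // the parameter shadows the file-scope name
    height = 42;                   // assigns to the parameter
    ::height = 42;                 // qualified lookup reaches the file-scope variable
}
Renaming either the parameters or the file-scope variables removes the shadowing and, with it, the warning.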

Reading from a file to a string doesn't work as expected

I'm currently writing my first program in OpenGL and C/C++. I came across the following problem:
I wrote a method which reads a file (in my case containing the vertex or fragment shader) and returns the content as one string.
std::string loadShader(const char* filepath)
{
    if(filepath){
        std::ifstream ifs(filepath, std::ifstream::in);
        std::ostringstream oss;
        std::string temp;
        while(ifs.good()){
            getline(ifs, temp);
            oss << temp << '\n';
            std::cout << temp << std::endl;
        }
        ifs.close();
        return oss.str();
    }
    else{
        exit(EXIT_FAILURE);
    }
}
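As an aside, the whole file can be streamed into the string in one call via rdbuf(), which avoids the good()/getline() loop entirely. A sketch with the same signature, assuming <fstream>, <sstream>, and <string> are included:
std::string loadShader(const char* filepath)
{
    if (!filepath)
        exit(EXIT_FAILURE);
    std::ifstream ifs(filepath, std::ifstream::in);
    std::ostringstream oss;
    oss << ifs.rdbuf();  // copy the entire stream into the string buffer
    return oss.str();
}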
I have two files, vertexShader.glsl and fragmentShader.glsl, which I want to convert into two separate strings for the creation of the shaders. The code above is called in the following way:
void createShaders(void){
    GLenum errorCheckValue = glGetError();
    const GLchar* vertexShader = loadShader("vertexShader.glsl").c_str();
    const GLchar* fragmentShader = loadShader("fragmentShader.glsl").c_str();
    vertexShaderId = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShaderId, 1, &vertexShader, NULL);
    glCompileShader(vertexShaderId);
    fragmentShaderId = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShaderId, 1, &fragmentShader, NULL);
    glCompileShader(fragmentShaderId);
    // some other code follows...
}
I debugged the code above and noticed that the vertexShader const char* is empty, while the fragmentShader const char* contains the correct code (the content of the file I just converted to a string).
I don't understand the inconsistency in the method; I can't figure out how one shader is parsed the right way while the other is empty.
I hope somebody can figure it out.
Thank you very much.
The problem is that your unnamed std::string returned from loadShader() only exists within the scope of the expression:
const GLchar* vertexShader = loadShader("vertexShader.glsl").c_str();
Quoting the standard:
Temporary objects are destroyed as the last step in evaluating the full-expression (1.9) that (lexically) contains the point where they were created.
The fix is simple:
const std::string vertexShaderStr = loadShader("vertexShader.glsl");
const GLchar *vertexShader = vertexShaderStr.c_str();
const std::string fragmentShaderStr = loadShader("fragmentShader.glsl");
const GLchar *fragmentShader = fragmentShaderStr.c_str();
Change this:
const GLchar* vertexShader = loadShader("vertexShader.glsl").c_str();
const GLchar* fragmentShader = loadShader("fragmentShader.glsl").c_str();
To this:
std::string vertexShaderString = loadShader("vertexShader.glsl");
std::string fragmentShaderString = loadShader("fragmentShader.glsl");
const GLchar* vertexShader = vertexShaderString.c_str();
const GLchar* fragmentShader = fragmentShaderString.c_str();
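A third variant relies on lifetime extension: binding the temporary to a reference-to-const keeps it alive for the scope of that reference. A sketch, equivalent in effect to the fixes above:
const std::string& vertexShaderStr = loadShader("vertexShader.glsl");  // temporary's lifetime is extended
const GLchar* vertexShader = vertexShaderStr.c_str();                  // valid while vertexShaderStr is in scope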