Fract Fract - 2 months ago 15
C++ Question

How to implement iGlobalTime for a GLSL fragment shader in C++?

Currently I have a tiny C++ application which display a triangle by these two shaders:

Vertex shader:

// Minimal pass-through vertex shader, stored as one concatenated C string
// literal. Only "#version 430\n" needs the explicit newline; the remaining
// pieces are glued together without newlines, which is legal GLSL since
// every statement ends with ';'.
static const char *vertex_shader_glsl = \
"#version 430\n"

// Vertex position arrives on attribute location 0 and is forwarded
// unchanged, both as gl_Position and as the varying "p".
"layout (location=0)in vec2 inVer;"
"out vec2 p;"
// Explicit gl_PerVertex redeclaration — commonly required when the shader
// is used as a separable program object (glCreateShaderProgramv/pipelines,
// as this project does).
"out gl_PerVertex"
"{"
"vec4 gl_Position;"
"};"

"void main()"
"{"
"gl_Position=vec4(inVer,0.0,1.0);"
"p=inVer;"
"}";


Fragment shader:

// Fragment shader: colors each fragment by testing it against a triangle
// defined in screen space via barycentric coordinates.
#version 430


out vec4 fragColor;
// NOTE(review): the vertex shader declares "out vec2 p;" — without explicit
// location qualifiers, in/out variable names must match across stages, so
// this input is likely never fed by the vertex stage. Confirm and rename
// one side to match the other.
in vec2 fragCoord;
// NOTE(review): plain (non-uniform) global that is never assigned and never
// read — its value is undefined. Probably meant: uniform vec2 iResolution;
vec2 iResolution;
//uniform float iGlobalTime;
// Global scratch value; the int literal -2 is implicitly converted to float.
float a = -2;

void main()
{
// NOTE(review): 'a' is written here but never read anywhere in this shader —
// dead code, presumably left over from experimentation.
a = sin(-2);
//Colors
vec3 fgColor = vec3(0.741, 0.635, 0.471); // triangle (foreground) color
vec3 bgColor = vec3(0.192, 0.329, 0.439); // background color

//Triangle barycentric coordinates defined on screen space


vec2 t0 = vec2(0.25, 0);
vec2 t1 = vec2(0.75, 0.25);
vec2 t2 = vec2(0.50, 0.85);
vec2 tCentroid = (t0 + t1 + t2)/3.0; // NOTE(review): computed but unused
//Compute UV coordinates
vec2 uv = fragCoord.xy;

// Edge vectors from t0, plus the vector from t0 to this fragment.
vec2 v0 = t2 - t0;
vec2 v1 = t1 - t0;
vec2 v2 = uv - t0;

//Compute barycentric coordinates
float dot00 = dot(v0, v0);
float dot01 = dot(v0, v1);
float dot02 = dot(v0, v2);
float dot11 = dot(v1, v1);
float dot12 = dot(v1, v2);

// Solve the 2x2 Gram system (Cramer's rule) for the two barycentric weights.
float invDenom = 1.0/(dot00 * dot11 - dot01 * dot01);
float baryX = (dot11 * dot02 - dot01 * dot12) * invDenom;
float baryY = (dot00 * dot12 - dot01 * dot02) * invDenom;

// Inside the triangle iff both weights are non-negative and their sum <= 1.
if((baryX >= 0.0) && (baryY >= 0.0) && (baryX + baryY <= 1.0)) {
fragColor = vec4(fgColor, 1.0);
} else {
fragColor = vec4(bgColor, 1.0);
}
}


These two shaders display a simple triangle:

enter image description here

However, I would like to rotate this triangle, so I would need some timer implementation (iGlobalTime).

I mean, I know that I would need something like this:
https://www.shadertoy.com/view/Xtt3WX

However I cannot copy-paste the code from the link above, since I'm unable to use iGlobalTime in my C++ project. How to update the shaders above to be able to rotate the triangle based on the system time? How to implement iGlobalTime in a very basic C++ application?

===========================

EDITED



I've updated:

ext.h:

...
// Number of GL entry points loaded into myglfunc[] at startup.
// Must equal the number of strings in strs[] in ext.cpp (the loader fills
// both arrays in lockstep), and each macro's bracket index below must match
// that name's position in strs[] exactly — any mismatch leaves NULL or
// wrong pointers behind, which crashes on the first call through them.
#ifdef DEBUG
#define NUMFUNCTIONS 14 // <<== HERE
#else
#define NUMFUNCTIONS 12 // <<== HERE
#endif

// Runtime-resolved GL function pointers, filled by the loader in ext.cpp.
extern void *myglfunc[NUMFUNCTIONS];

// Each macro casts slot i of myglfunc[] to the proper function-pointer type.
#define glCreateShaderProgramv ((PFNGLCREATESHADERPROGRAMVPROC)myglfunc[0])
#define glGenProgramPipelines ((PFNGLGENPROGRAMPIPELINESPROC)myglfunc[1])
#define glBindProgramPipeline ((PFNGLBINDPROGRAMPIPELINEPROC)myglfunc[2])
#define glUseProgramStages ((PFNGLUSEPROGRAMSTAGESPROC)myglfunc[3])
#define glProgramUniform4fv ((PFNGLPROGRAMUNIFORM4FVPROC)myglfunc[4])
#define glGenFramebuffers ((PFNGLGENFRAMEBUFFERSPROC)myglfunc[5])
#define glBindFramebuffer ((PFNGLBINDFRAMEBUFFERPROC)myglfunc[6])
#define glTexStorage2D ((PFNGLTEXSTORAGE2DPROC)myglfunc[7])
#define glDrawBuffers ((PFNGLDRAWBUFFERSPROC)myglfunc[8])
#define glFramebufferTexture ((PFNGLFRAMEBUFFERTEXTUREPROC)myglfunc[9])
#define glProgramUniform1f ((PFNGLPROGRAMUNIFORM1FPROC)myglfunc[10]) // <<== ADDED NEW LINE HERE
#define glGetUniformLocation ((PFNGLGETUNIFORMLOCATIONPROC)myglfunc[11]) // <<== ADDED NEW LINE HERE

// Debug-only entry points occupy the last two slots.
#ifdef DEBUG
#define glGetProgramiv ((PFNGLGETPROGRAMIVPROC)myglfunc[12])
#define glGetProgramInfoLog ((PFNGLGETPROGRAMINFOLOGPROC)myglfunc[13])
#endif
...


updated ext.cpp:

...
// Names of the GL functions the loader resolves at runtime. CRITICAL: entry
// i of this array becomes myglfunc[i], so the order and count here must
// match the macro indices in ext.h exactly (12 entries, 14 with DEBUG).
//
// BUG FIX: the original list contained a stray "glProgramUniform1fv" (no
// matching macro in ext.h), which shifted every following pointer by one
// slot and pushed the last name past NUMFUNCTIONS. As a result
// glProgramUniform1f / glGetUniformLocation called through wrong or NULL
// pointers — the reported 0xC0000005 access violation at address 0.
static char *strs[] = {
"glCreateShaderProgramv",   // myglfunc[0]
"glGenProgramPipelines",    // myglfunc[1]
"glBindProgramPipeline",    // myglfunc[2]
"glUseProgramStages",       // myglfunc[3]
"glProgramUniform4fv",      // myglfunc[4]
"glGenFramebuffers",        // myglfunc[5]
"glBindFramebuffer",        // myglfunc[6]
"glTexStorage2D",           // myglfunc[7]
"glDrawBuffers",            // myglfunc[8]
"glFramebufferTexture",     // myglfunc[9]
"glProgramUniform1f",       // myglfunc[10]
"glGetUniformLocation",     // myglfunc[11]

#ifdef DEBUG
"glGetProgramiv",           // myglfunc[12]
"glGetProgramInfoLog",      // myglfunc[13]
#endif
};
...


updated intro.cpp:

...
// Per-frame render entry point; `time` is presumably the elapsed time handed
// in by the platform layer (units not shown here — see note below).
void intro_do(long time)
{
...
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glBindProgramPipeline(renderingPipeline);
// Set fparams to give input to shaders

// Render
glProgramUniform4fv(fragmentShader, 0, 4, fparams);
// NOTE(review): Shadertoy's iGlobalTime is in SECONDS. If `time` is in
// milliseconds, this should be time / 1000.0f rather than time * 1000 —
// confirm the units before copying Shadertoy code.
// NOTE(review): the reported 0xC0000005 crash reading address 0 is exactly
// what a call through a NULL function pointer looks like — check that the
// order/count of strs[] in ext.cpp matches the myglfunc[] indices in ext.h
// (the extra "glProgramUniform1fv" string there shifts later slots by one).
glProgramUniform1f(fragmentShader, glGetUniformLocation(fragmentShader, "iGlobalTime"), time*1000); // <<== ADDED NEW LINE HERE
glRects(-1, -1, 1, 1); // Deprecated. Still seems to work though.

...
}


The application now runs, but crashes immediately saying: "Unhandled exception at 0x7542CB49 in 4kIntro.exe: 0xC0000005: Access violation reading location 0x00000000."

Answer

You can do something like this (you need a c++11 compliant compiler)

#include <chrono>

// Game loop skeleton showing how to maintain a global time value (in
// seconds) suitable for an iGlobalTime-style uniform.
int main()
{
    // init stuff

    // game loop
    // FIX: high_resolution_clock lives inside namespace std::chrono.
    using Clock = std::chrono::high_resolution_clock;
    using TimePoint = std::chrono::time_point<Clock>;

    TimePoint t0, t1;
    float global_time = 0; // total seconds since the loop started
    t0 = Clock::now();
    while (run_game)
    {
        // dt is the time since last frame
        // global_time is the total time since the start of the game loop
        t1 = Clock::now();
        // FIX: duration_cast yields a duration object, not a float —
        // .count() extracts the value (duration<float> defaults to seconds).
        float dt = std::chrono::duration_cast<std::chrono::duration<float>>(t1 - t0).count();
        // FIX: re-anchor t0 so next frame's dt measures one frame; otherwise
        // dt is the total elapsed time and global_time grows quadratically.
        t0 = t1;
        global_time += dt;

        // Send your uniform and kickoff rendering
    }

    return 0;
}

Hope this helps (there might be some typos in the code I did not check it)

EDIT: It looks like you misunderstood some stuff so I will try to explain a bit more how stuff happens.

Shaders

According to the OpenGL wiki, a shader is "a user-defined program designed to run on some stage of a graphics processor which purpose is to execute one of the programmable stages of the rendering pipeline."

So what this means is that, when you ask the GPU to draw stuff (via a draw call , i.e. glDrawArrays or glDrawElements) it will call, for each vertex, the vertex shader, then, for each fragment, the fragment shader. (this is very simplified, for a more complete explanation about the whole pipeline you can refer to this OpenGL wiki).

Communication between the CPU and the GPU

Before calling glDrawArrays or glDrawElements you had first to send the GPU the vertices you wanted it to draw.

You did something in your c++ code like (this might not be the exact same code, don't worry about it)

// Upload the vertex data to the GPU once, ahead of any draw calls.
GLfloat data[] = { ... } // The data you want to send
GLuint vao, vbo;
// The VAO records the attribute layout; the VBO holds the raw vertex bytes.
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
// Copy `data` into the bound buffer; GL_STATIC_DRAW = written once, drawn many times.
glBufferData(GL_ARRAY_BUFFER, sizeof data, data, GL_STATIC_DRAW);
// Attribute 0: two tightly-packed floats per vertex, matching the shader's
// "layout (location=0) in vec2 inVer;".
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, (GLvoid*)0);

Here, you send some vertex array to the GPU and, on each element of it, your vertex shader will be executed, everytime you issue a drawcall. In it you output a gl_Position, which will then be rasterized by the GPU and input to the fragment (or pixel) shader. Here, for each fragment / pixel your shader will be executed.

"This is fine, but how do I send global data to the GPU ?" This is where uniforms come into play. You can send per-shader data to the GPU using them. This is done in your c++ code something like

GLuint location = glGetUniformLocation(program_id, "uniform_name");
glUniform_(location, uniform_data);

Since the OpenGL API is written in C, all the glUniform calls are postfixed by the type of data you want to send (see the docs for all the calls). Then, after sending the data, you are able to use it in the shader.

Be careful, declaring a uniform in a shader is not enough, you have to send data to the GPU too. This is the same as any variable in C++: as long as you do not assign anything to it, it won't have a reliable value.

In your case, since you want to send a float uniform, called iGlobalTime, you will have something like

GLuint time_loc = glUniformLocation(program_id, "iGlobalTime");
glUniform1f(time_loc, global_time)

The updated c++ code taking most of this into account is

#include <chrono>
int main()
{
    // init stuff
    // Compile your shader_program and store its id in program_id
    // Set the vertex data on the GPU

    // game loop
    using Clock = std::chronohigh_resolution_clock;
    using TimePoint = std::chrono::time_point<Clock>;

    TimePoint t0, t1;
    float global_time = 0;
    t0 = Clock::now();

    while (!quit)
    {
        // dt is the time since last frame
        // global_time is the total time since the start of the game loop
        t1 = Clock::now();
        float dt = std::chrono::duration_cast<std::chrono::duration<float, std::chrono::seconds::period>>(t1 - t0);
        global_time += dt;

        // Activate program
        glUseProgram(program_id);
        // Send global_time uniform
        glUniform1f(glGetUniformLocation(program_id, "iGlobalTime"), global_time);

        // Issue draw call
        glBindVertexArray(vao);
        glDrawArrays(GL_TRIANGLES, 0, 4);
        glBindVertexArray(0);
    }

    return 0;
}