poetryofruins poetryofruins - 1 month ago 6
C++ Question

Indexed drawing: data copied from a working example is displayed incorrectly

I have this piece of code:

#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <iostream>

void keyCallback(GLFWwindow* window, int key, int scancode, int action, int mods) {
if(key == GLFW_KEY_ESCAPE && action == GLFW_PRESS) {
glfwSetWindowShouldClose(window, GLFW_TRUE);
}
}

int main() {
glfwInit();

glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 5);

auto window = glfwCreateWindow(800, 600, "title", NULL, NULL);
glfwMakeContextCurrent(window);
glfwSetKeyCallback(window, keyCallback);

glewExperimental = true;
glewInit();

glViewport(0, 0, 800, 600);

static const char* vsSource[] = {
"#version 450 core\n"
"layout (location = 0) in vec3 position;"
"uniform mat4 mvp;"
"out vec3 pos;"
"void main() {"
"gl_Position = mvp * vec4(position, 1.0);"
"pos = position;"
"}"
};

static const char* fsSource[] = {
"#version 450 core\n"
"in vec3 pos;"
"out vec4 color;"
"void main() {"
"color = vec4(pos, 1.0) * 0.5 + 0.5;"
"}"
};

static const GLfloat clearColor[] = { 0.1f, 0.1f, 0.1f, 1.0f };

static const GLfloat vertices[] = {
-0.25f, -0.25f, -0.25f,
-0.25f, 0.25f, -0.25f,
0.25f, -0.25f, -0.25f,
0.25f, 0.25f, -0.25f,
0.25f, -0.25f, 0.25f,
0.25f, 0.25f, 0.25f,
-0.25f, -0.25f, 0.25f,
-0.25f, 0.25f, 0.25f,
};

static const GLushort indices[] = {
0, 1, 2,
2, 1, 3,
2, 3, 4,
4, 3, 5,
4, 5, 6,
6, 5, 7,
6, 7, 0,
0, 7, 1,
6, 0, 2,
2, 4, 6,
7, 5, 3,
7, 3, 1
};

auto vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, vsSource, NULL);
glCompileShader(vs);

auto fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, fsSource, NULL);
glCompileShader(fs);

auto program = glCreateProgram();
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glDeleteShader(vs);
glDeleteShader(fs);

int logLength = 1024;
char* log = new char[logLength];
glGetProgramInfoLog(program, logLength, &logLength, log);
std::cout << log << std::endl;

glUseProgram(program);

auto mvpLocation = glGetUniformLocation(program, "mvp");

glm::mat4 projection = glm::perspective(50.0f, 800.0f/600.0f, 0.1f, 1000.0f);
glm::mat4 model = glm::translate(glm::mat4(1.0f), glm::vec3(0.0, 0.0, -10.0));
glm::mat4 mvp;

GLuint vao, vbo, ibo;

glCreateVertexArrays(1, &vao);
glBindVertexArray(vao);

glCreateBuffers(1, &vbo);
glNamedBufferStorage(vbo, sizeof(vertices), vertices, GL_MAP_READ_BIT);
glVertexArrayVertexBuffer(vao, 0, vbo, 0, 2 * sizeof(GLfloat));
glVertexArrayAttribFormat(vao, 0, 3, GL_FLOAT, GL_FALSE, NULL);
glVertexArrayAttribBinding(vao, 0, 0);
glEnableVertexArrayAttrib(vao, 0);

glCreateBuffers(1, &ibo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glNamedBufferStorage(ibo, sizeof(indices), indices, GL_MAP_READ_BIT);

float timer = 0.0f;

static const GLfloat depthClear = 1.0f;

glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);

glEnable(GL_CULL_FACE);

glfwSwapInterval(1);

while(!glfwWindowShouldClose(window)) {
glClearBufferfv(GL_COLOR, 0, clearColor);
glClearBufferfv(GL_DEPTH, 0, &depthClear);
mvp = projection * model * glm::rotate(glm::mat4(), timer / 1000.0f, glm::vec3(0.0f, 1.0f, 1.0f));
glUniformMatrix4fv(mvpLocation, 1, GL_FALSE, glm::value_ptr(mvp));
glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_SHORT, 0);
glfwPollEvents();
glfwSwapBuffers(window);
timer += 1.0f;
}

glfwTerminate();
return 0;
}


I compile it with:

clang++ -std=c++11 main.cpp -o cube -lGLEW -lglfw3 -lGL -gsplit-dwarf


And this is how it is displayed when I compile and run it (expecting a rotating cube):

incorrect indices

My question is: what happened here? Both the indices and vertices are copied from a similar, working example piece of code - i.e. they are the same.

I spent the last hour looking for any significant difference and the reason of the problem, but could not find it. The example code (from OpenGL SB7) compiles, runs and displays a cube. It uses older OpenGL apis (like
glBufferData
where I use
glNamedBufferStorage
).

I tried different sets of vertices and indices (from different examples on the web) and all of them are displayed like this. So I think it's not the data's fault, but there's something in my code that causes this. Unfortunately, I can't find what and where.

Answer
glVertexArrayVertexBuffer(vao, 0, vbo, 0, 2 * sizeof(GLfloat));
                                          ^ wat

Stride's wrong for vertcies. Should be 3:

glVertexArrayVertexBuffer(vao, 0, vbo, 0, 3 * sizeof(GLfloat));

I'm guessing the pre-geometry-transplant code was using 2D geometry.

Comments