Problems with rendering a quad in OpenGL using indices


I want to draw a quad with OpenGL, but it stopped working when I introduced indexing. Without indices the code worked fine and I could use glDrawArrays:

#define GLEW_STATIC
#include <GL\glew.h>
#include <GLFW\glfw3.h>
#include <glm.hpp>
#include <iostream>
#include <fstream>
#include <string>
#include <vector>

#define WIDTH 800
#define HEIGHT 600
#define TITLE "Dynamic"

GLFWwindow* window;
int vaoID;


std::vector<float> vertices = { -0.5f, 0.5f, 0,    -0.5f, -0.5f, 0,    0.5f, -0.5f, 0,    0.5f, 0.5f, 0 };
std::vector<int> indices = { 0, 1, 3,   3, 1, 2 };
void loadToVAO(std::vector<float> vertices, std::vector<int> indices);

void update() {
    loadToVAO(vertices, indices);
    while (!glfwWindowShouldClose(window)) {
        glfwPollEvents();
        glClear(GL_COLOR_BUFFER_BIT);
        glClearColor(1, 0, 0, 1);
        //glDrawArrays(GL_TRIANGLES, 0, 6);
        glDrawElements(GL_TRIANGLES, 6, GL_INT, 0);
        glfwSwapBuffers(window);
    }
}

int main() {
    if (!glfwInit())
        std::cout << "Couldn't initialize GLFW!" << std::endl;

    window = glfwCreateWindow(WIDTH, HEIGHT, TITLE, NULL, NULL);
    glfwMakeContextCurrent(window);
    glfwSwapInterval(1);

    if (GLEW_OK != glewInit())
        std::cout << "GLEW is not working!" << std::endl;

    std::cout << "Your GL version: " << glGetString(GL_VERSION) << std::endl;
    //glEnable(GL_CULL_FACE);
    //glCullFace(GL_BACK);
    update();
}

void loadToVAO(std::vector<float> vertices, std::vector<int> indices) {
    GLuint vertexVBO;
    GLuint indicesVBO;
    GLuint vaoID;
    glGenBuffers(1, &vertexVBO);
    glGenVertexArrays(1, &vaoID);
    glBindVertexArray(vaoID);
    glGenBuffers(1, &indicesVBO);
    glBindBuffer(GL_ARRAY_BUFFER, vertexVBO);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indicesVBO);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices) * sizeof(int), &indices[0], GL_STATIC_DRAW);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices) * sizeof(float), &vertices[0], GL_STATIC_DRAW);
    glVertexAttribPointer(0, 3, GL_FLOAT, false, 0, 0);
    glEnableVertexAttribArray(0);
}

1 Answer

Answer by BDL (accepted):

Again, the sizeof operator is the problem: it returns the size of the object it is applied to, not the size of data that the object merely points to. In the line

glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices) * sizeof(int), &indices[0], GL_STATIC_DRAW);

sizeof(indices) == sizeof(std::vector<int>), which is the size of the vector object itself, not the size of the data it contains. The correct code uses indices.size(), which returns the number of elements in the vector:

glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(int), &indices[0], GL_STATIC_DRAW);
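To make the distinction concrete, here is a small standalone check (my own illustration, not from the answer; the exact byte counts are implementation-dependent, and a typical 64-bit platform is assumed):

#include <iostream>
#include <vector>

int main() {
    std::vector<int> indices = { 0, 1, 3, 3, 1, 2 };

    // Size of the vector object itself (its internal bookkeeping),
    // independent of the element count; typically 24 bytes on 64-bit.
    std::cout << sizeof(indices) << '\n';

    // Size of the element data the vector manages: 6 * sizeof(int) = 24 here,
    // which matches sizeof(indices) only by coincidence.
    std::cout << indices.size() * sizeof(int) << '\n';

    // Growing the vector changes the data size but not sizeof(indices).
    indices.push_back(2);
    std::cout << sizeof(indices) << ' '
              << indices.size() * sizeof(int) << '\n'; // e.g. "24 28"
}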

The same goes for the upload of the vertices.
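Applied to the vertex upload, that is:

glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(float), &vertices[0], GL_STATIC_DRAW);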

Edit: You should also not pass vectors by value unless it is absolutely necessary. At the moment the contents of both vectors are copied every time you pass them to loadToVAO. If you change the function signature to

void loadToVAO(std::vector<float>& vertices, std::vector<int>& indices)

the vectors are passed by reference and the data is not copied.
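Putting both fixes together, a sketch of the corrected loadToVAO could look like this (the const qualifiers, the .data() calls, and the switch to unsigned int indices are my additions, not part of the answer above; glDrawElements requires an unsigned index type, so GL_INT in the question's draw call would also need to become GL_UNSIGNED_INT):

void loadToVAO(const std::vector<float>& vertices,
               const std::vector<unsigned int>& indices) {
    GLuint vaoID, vertexVBO, indicesVBO;
    glGenVertexArrays(1, &vaoID);
    glBindVertexArray(vaoID);

    // Vertex positions: size() * sizeof(element), not sizeof(vector).
    glGenBuffers(1, &vertexVBO);
    glBindBuffer(GL_ARRAY_BUFFER, vertexVBO);
    glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(float),
                 vertices.data(), GL_STATIC_DRAW);

    // Index data: the element array buffer binding is stored in the VAO.
    glGenBuffers(1, &indicesVBO);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indicesVBO);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned int),
                 indices.data(), GL_STATIC_DRAW);

    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
    glEnableVertexAttribArray(0);
}

The matching draw call is then glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0).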