Programmatically generate texture for WebGL in JavaScript


I have the following C code that programmatically generates a texture for OpenGL. I need to do the same thing in JavaScript for WebGL.

How do I properly create the 3 dimensional checkerboard matrix?

GLubyte checkerboard[64][64][3];

How do I set the values?

checkerboard[i][j][0] = (GLubyte) c;

How do I properly call gl.texImage2D() with the generated texture in WebGL?

gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGB, 64, 64, 0, gl.RGB, gl.UNSIGNED_INT, checkerboard);

Here is the current C code:

void CreateTextures() {
    GLubyte checkerboard[64][64][3];
    int i, j, c;

    for (i=0; i<64; i++) {
        for (j=0; j<64; j++) {
            c = ((i & 8) ^ (j & 8))*255;
            checkerboard[i][j][0] = (GLubyte) c;
            checkerboard[i][j][1] = (GLubyte) c;
            checkerboard[i][j][2] = (GLubyte) c;
        }
    }

    glGenTextures( 1, &checkerboardTexture);
    glBindTexture(GL_TEXTURE_2D, checkerboardTexture);
    glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexImage2D( GL_TEXTURE_2D, 0, GL_RGB, 64, 64, 0, GL_RGB, GL_UNSIGNED_INT, checkerboard);
}

I think the resulting function will look something like this in JavaScript:

function CreateTextures() {
    // How do I create the equivalent of GLubyte checkerboard[64][64][3] here?
    var checkerboard;

    for (var i = 0; i < 64; i++) {
        for (var j = 0; j < 64; j++) {
            var c = ((i & 8) ^ (j & 8)) * 255;
            // How do I set the values?
            checkerboard[i][j][0] = c;
            checkerboard[i][j][1] = c;
            checkerboard[i][j][2] = c;
        }
    }

    var checkerboardTexture = gl.createTexture();
    gl.bindTexture( gl.TEXTURE_2D, checkerboardTexture);
    gl.texParameterf( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
    gl.texParameterf( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
    gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGB, 64, 64, 0, gl.RGB, gl.UNSIGNED_INT, checkerboard);
}

1 Answer

Accepted answer, by Peter O.:

You don't create a multidimensional array; instead, you create a single-dimensional array like this:

var checkerboard = [];

In the texture array, each pixel will occupy 4 elements: red, green, blue, and alpha, in that order. Here's an example of how you fill each pixel:

// Width is 64 pixels
checkerboard[(i*64+j)*4] = c; // Red component
checkerboard[(i*64+j)*4+1] = c; // Green component
checkerboard[(i*64+j)*4+2] = c; // Blue component
checkerboard[(i*64+j)*4+3] = 0xff; // Alpha component
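
For the 64×64 image, those assignments sit inside two nested loops. Here's a sketch that adapts the checkerboard formula from the C code; note that the C version's ((i & 8) ^ (j & 8)) * 255 relies on GLubyte truncation, so an explicit conditional is clearer in JavaScript:

for (var i = 0; i < 64; i++) {
    for (var j = 0; j < 64; j++) {
        var c = ((i & 8) ^ (j & 8)) ? 255 : 0; // white or black square
        checkerboard[(i*64+j)*4]   = c;    // Red component
        checkerboard[(i*64+j)*4+1] = c;    // Green component
        checkerboard[(i*64+j)*4+2] = c;    // Blue component
        checkerboard[(i*64+j)*4+3] = 0xff; // Alpha component (opaque)
    }
}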

Next, you convert the array to a byte array:

checkerboard=new Uint8Array(checkerboard);
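
(Alternatively, you could allocate the typed array at its final size up front, for example var checkerboard = new Uint8Array(64 * 64 * 4);, and write into it directly inside the loops, skipping this conversion step.)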

Finally, you generate the texture. Note that this example uses gl.RGBA rather than gl.RGB; RGBA with gl.UNSIGNED_BYTE is the most reliably supported format combination in WebGL, at the cost of one extra byte per pixel for the alpha component:

gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, 
   64, 64, 0, gl.RGBA, gl.UNSIGNED_BYTE, checkerboard);

One more thing: depending on how your texture coordinates are set up, the uploaded texture may appear vertically flipped compared with what the equivalent OpenGL code produces. To have WebGL flip the pixel data bottom-up during upload, add the following call before any texImage2D calls:

gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);
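
Putting the pieces together, a minimal sketch of the full JavaScript function might look like this (assuming gl is an existing WebGLRenderingContext, as in the question's code):

function CreateTextures() {
    // One RGBA byte quadruplet per pixel for a 64x64 image
    var checkerboard = new Uint8Array(64 * 64 * 4);

    for (var i = 0; i < 64; i++) {
        for (var j = 0; j < 64; j++) {
            var c = ((i & 8) ^ (j & 8)) ? 255 : 0;
            checkerboard[(i*64+j)*4]   = c;    // Red
            checkerboard[(i*64+j)*4+1] = c;    // Green
            checkerboard[(i*64+j)*4+2] = c;    // Blue
            checkerboard[(i*64+j)*4+3] = 0xff; // Alpha (opaque)
        }
    }

    var checkerboardTexture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, checkerboardTexture);
    gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
    gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
    gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 64, 64, 0,
                  gl.RGBA, gl.UNSIGNED_BYTE, checkerboard);
    return checkerboardTexture;
}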