Hands-On Game Development with WebAssembly
上QQ阅读APP看书,第一时间看更新

The ModuleLoaded function

In the old 2D canvas code, we defined the ShipPosition JavaScript function before the ModuleLoaded function, but we have swapped these two functions for the WebGL demo. I felt it was better to explain the WebGL initialization before the rendering portion of the code. Here is the new version of the ModuleLoaded function in its entirety:

// Runs once the WebAssembly module has finished loading.
// Grabs a WebGL context, compiles and links the sprite shaders, uploads the
// interleaved quad vertex/texcoord buffer, configures the sprite texture,
// starts the asynchronous spaceship image download, and sets the viewport.
// Relies on globals declared elsewhere in this file: canvas, gl, program,
// u_texture_location, u_translate_location, a_position_location,
// a_texcoord_location, vertex_texture_buffer, texture, img, image_width,
// image_height, vertex_shader_code, fragment_shader_code,
// vertex_texture_data, STRIDE, XY_OFFSET, UV_OFFSET.
function ModuleLoaded() {
  canvas = document.getElementById('canvas');
  // Fall back to the prefixed context name for older browsers.
  gl = canvas.getContext("webgl", { alpha: false }) ||
       canvas.getContext("experimental-webgl", { alpha: false });

  if (!gl) {
    console.log("No WebGL support!");
    return;
  }

  // Standard source-over alpha blending so sprite transparency works.
  gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
  gl.enable(gl.BLEND);

  const vertex_shader = gl.createShader(gl.VERTEX_SHADER);
  gl.shaderSource(vertex_shader, vertex_shader_code);
  gl.compileShader(vertex_shader);

  if (!gl.getShaderParameter(vertex_shader, gl.COMPILE_STATUS)) {
    // BUGFIX: separator added so the info log is not glued to the message.
    console.log('Failed to compile vertex shader: ' +
      gl.getShaderInfoLog(vertex_shader));
    gl.deleteShader(vertex_shader);
    return;
  }

  const fragment_shader = gl.createShader(gl.FRAGMENT_SHADER);
  gl.shaderSource(fragment_shader, fragment_shader_code);
  gl.compileShader(fragment_shader);

  if (!gl.getShaderParameter(fragment_shader, gl.COMPILE_STATUS)) {
    console.log('Failed to compile fragment shader: ' +
      gl.getShaderInfoLog(fragment_shader));
    gl.deleteShader(fragment_shader);
    // BUGFIX: also release the vertex shader that compiled successfully.
    gl.deleteShader(vertex_shader);
    return;
  }

  program = gl.createProgram();
  gl.attachShader(program, vertex_shader);
  gl.attachShader(program, fragment_shader);
  gl.linkProgram(program);

  if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
    // BUGFIX: include the program info log so link failures are diagnosable.
    console.log('Failed to link program: ' +
      gl.getProgramInfoLog(program));
    gl.deleteProgram(program);
    gl.deleteShader(vertex_shader);
    gl.deleteShader(fragment_shader);
    return;
  }

  gl.useProgram(program);

  // The linked program keeps its own reference to the attached shaders, so
  // the shader objects can be flagged for deletion to free driver resources.
  gl.deleteShader(vertex_shader);
  gl.deleteShader(fragment_shader);

  // Uniform handles used every frame by the render code.
  u_texture_location = gl.getUniformLocation(program, "u_texture");
  u_translate_location = gl.getUniformLocation(program,
    "u_translate");

  // Vertex-attribute handles for position and texture coordinates.
  a_position_location = gl.getAttribLocation(program, "a_position");
  a_texcoord_location = gl.getAttribLocation(program, "a_texcoord");

  // Upload the interleaved XY/UV quad data to the GPU once; it is static.
  vertex_texture_buffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, vertex_texture_buffer);
  gl.bufferData(gl.ARRAY_BUFFER, vertex_texture_data,
    gl.STATIC_DRAW);

  // Describe how each attribute reads from the interleaved buffer:
  // 2 floats of position at XY_OFFSET, 2 floats of UV at UV_OFFSET,
  // STRIDE bytes between consecutive vertices.
  gl.enableVertexAttribArray(a_position_location);
  gl.vertexAttribPointer(a_position_location, 2, gl.FLOAT, false,
    STRIDE, XY_OFFSET);

  gl.enableVertexAttribArray(a_texcoord_location);
  gl.vertexAttribPointer(a_texcoord_location, 2, gl.FLOAT, false,
    STRIDE, UV_OFFSET);

  texture = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, texture);
  // NOTE(review): gl.REPEAT requires power-of-two image dimensions in
  // WebGL 1 — confirm spaceship.png is POT, or switch to gl.CLAMP_TO_EDGE.
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT);

  // NEAREST sampling keeps the deliberately pixelated, old-school look.
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);

  // Download the sprite asynchronously; once it arrives, record its size
  // for the render code and copy the pixels into the bound texture.
  img = new Image();
  img.addEventListener('load', function() {
    image_width = img.width;
    image_height = img.height;

    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA,
      gl.UNSIGNED_BYTE, img);
  });
  img.src = "spaceship.png";

  // Map WebGL clip space onto the full canvas.
  gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
}

The first few lines get the canvas element and use that to get a WebGL context. If the JavaScript fails to get the WebGL context, we log a message to the browser console indicating that the browser does not support WebGL:

canvas = document.getElementById('canvas');

gl = canvas.getContext("webgl", { alpha: false }) ||
canvas.getContext("experimental-webgl", {
alpha: false });
if (!gl) {
console.log("No WebGL support!");
return;
}

The two lines after that turn on alpha blending:

gl.blendFunc( gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA );
gl.enable( gl.BLEND );

Compiling, loading, and linking the vertex and the fragment shader is a lot of challenging code. I am not sure why there is no function inside of the WebGL library that does all of this in one step. Almost everyone writing WebGL code for 2D has to do this, and they either put it into a separate .js file, or they copy and paste it into their code for every project. For now, all you need to know about the following batch of code is that it is taking the vertex and fragment shader we wrote earlier and compiling it into the program variable. From that point on, we will be using the program variable to interact with the shaders. Here is the code:

var vertex_shader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource( vertex_shader, vertex_shader_code );
gl.compileShader( vertex_shader );

if( !gl.getShaderParameter(vertex_shader, gl.COMPILE_STATUS) ) {
console.log('Failed to compile vertex shader' +
gl.getShaderInfoLog(vertex_shader));
gl.deleteShader(vertex_shader);
return;
}

var fragment_shader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource( fragment_shader, fragment_shader_code );
gl.compileShader( fragment_shader );

if( !gl.getShaderParameter(fragment_shader, gl.COMPILE_STATUS) ) {
console.log('Failed to compile fragment shader' +
gl.getShaderInfoLog(fragment_shader));
gl.deleteShader(fragment_shader);
return;
}

program = gl.createProgram();
gl.attachShader(program, vertex_shader);
gl.attachShader(program, fragment_shader);
gl.linkProgram(program);

if( !gl.getProgramParameter(program, gl.LINK_STATUS) ) {
console.log('Failed to link program');
gl.deleteProgram(program);
return;
}
gl.useProgram(program);

Now that we have the WebGLProgram object in our program variable, we can use that object to interact with our shaders.

  1. The first thing we are going to do is grab references to the uniform variables in our shader programs:
u_texture_location = gl.getUniformLocation(program, "u_texture");
u_translate_location = gl.getUniformLocation(program, "u_translate");
  1. After that, we will use the program object to get references to the attribute variables that are used by our vertex shader:
a_position_location = gl.getAttribLocation(program, "a_position");
a_texcoord_location = gl.getAttribLocation(program, "a_texcoord");
  1. Now, it is time to start working with buffers. Do you remember when we created that Float32Array with all of our vertex data in it? It is time to use buffers to send that data to the GPU:
vertex_texture_buffer = gl.createBuffer();

gl.bindBuffer(gl.ARRAY_BUFFER, vertex_texture_buffer);
gl.bufferData(gl.ARRAY_BUFFER, vertex_texture_data,
gl.STATIC_DRAW);

gl.enableVertexAttribArray(a_position_location);
gl.vertexAttribPointer(a_position_location, 2, gl.FLOAT, false,
STRIDE, XY_OFFSET);

gl.enableVertexAttribArray(a_texcoord_location);
gl.vertexAttribPointer(a_texcoord_location, 2, gl.FLOAT, false,
STRIDE, UV_OFFSET);

The first line creates a new buffer called vertex_texture_buffer. The line that starts with gl.bindBuffer binds vertex_texture_buffer to ARRAY_BUFFER, and then bufferData adds the data we had in vertex_texture_data to ARRAY_BUFFER. After that, we need to use the references to a_position and a_texcoord that we created earlier in the a_position_location and a_texcoord_location variables to tell WebGL where in this array buffer it will find the data for the a_position and a_texcoord attributes. The first thing it does is call enableVertexAttribArray to enable that attribute using the location variable we created. Next, vertexAttribPointer uses the STRIDE and XY_OFFSET or UV_OFFSET to tell WebGL where the attribute data is inside of the buffer data.

  1. After that, we will create and bind a texture buffer:
texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
  1. Now that we have a bound texture buffer, we can configure that buffer for repeat wrapping (gl.REPEAT tiles the texture; mirroring would require gl.MIRRORED_REPEAT) and nearest neighbor interpolation when scaling:
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT);

gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);

We are using gl.NEAREST instead of gl.LINEAR because I would like the game to have an old-school pixelated look. In your game, you may prefer a different algorithm.

  1. After configuring the texture buffer, we are going to download the spaceship.png image and load that image data into the texture buffer:
img = new Image();

img.addEventListener('load', function() {
image_width = img.width;
image_height = img.height;

gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA,
gl.UNSIGNED_BYTE, img );
});

img.src = "spaceship.png";
  1. The final thing we will do is set the viewport to go from (0,0) to the canvas width and height. The viewport tells WebGL how the space in the canvas element will relate to our WebGL clip space:
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);