/**
* A class creating buffers for a textured box to render it with WebGL
*/
class RasterTextureBox {
/**
* Creates all WebGL buffers for the textured box
* 6 ------- 7
* / | / |
* 3 ------- 2 |
* | | | |
* | 5 -----|- 4
* | / | /
* 0 ------- 1
* looking in negative z axis direction
* @param {WebGLRenderingContext} gl - The canvas' context
* @param {Vector} minPoint - The minimal x,y,z of the box
* @param {Vector} maxPoint - The maximal x,y,z of the box
* @param {string} texture - Path of the texture image
*/
constructor(gl, minPoint, maxPoint, texture) {
this.gl = gl;
const mi = minPoint;
const ma = maxPoint;
let vertices = [
// front
mi.x, mi.y, ma.z, ma.x, mi.y, ma.z, ma.x, ma.y, ma.z,
ma.x, ma.y, ma.z, mi.x, ma.y, ma.z, mi.x, mi.y, ma.z,
// back
ma.x, mi.y, mi.z, mi.x, mi.y, mi.z, mi.x, ma.y, mi.z,
mi.x, ma.y, mi.z, ma.x, ma.y, mi.z, ma.x, mi.y, mi.z,
// right
ma.x, mi.y, ma.z, ma.x, mi.y, mi.z, ma.x, ma.y, mi.z,
ma.x, ma.y, mi.z, ma.x, ma.y, ma.z, ma.x, mi.y, ma.z,
// top
mi.x, ma.y, ma.z, ma.x, ma.y, ma.z, ma.x, ma.y, mi.z,
ma.x, ma.y, mi.z, mi.x, ma.y, mi.z, mi.x, ma.y, ma.z,
// left
mi.x, mi.y, mi.z, mi.x, mi.y, ma.z, mi.x, ma.y, ma.z,
mi.x, ma.y, ma.z, mi.x, ma.y, mi.z, mi.x, mi.y, mi.z,
// bottom
mi.x, mi.y, mi.z, ma.x, mi.y, mi.z, ma.x, mi.y, ma.z,
ma.x, mi.y, ma.z, mi.x, mi.y, ma.z, mi.x, mi.y, mi.z
];
const vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
this.vertexBuffer = vertexBuffer;
this.elements = vertices.length / 3;
let cubeTexture = gl.createTexture();
let cubeImage = new Image();
cubeImage.onload = function () {
gl.bindTexture(gl.TEXTURE_2D, cubeTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, cubeImage);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.bindTexture(gl.TEXTURE_2D, null);
}
cubeImage.src = texture;
this.texBuffer = cubeTexture;
let uv = [
// front
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// back
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// right
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// top
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// left
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
// bottom
0, 0, 1, 0, 1, 1,
1, 1, 0, 1, 0, 0,
];
let uvBuffer = this.gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, uvBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(uv), gl.STATIC_DRAW);
this.texCoords = uvBuffer;
}
render(shader) {
this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.vertexBuffer);
const positionLocation = shader.getAttributeLocation("a_position");
this.gl.enableVertexAttribArray(positionLocation);
this.gl.vertexAttribPointer(positionLocation, 3, this.gl.FLOAT, false, 0, 0);
// Bind the texture coordinates in this.texCoords
// to their attribute in the shader
this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.texCoords);
const texCoordLocation = shader.getAttributeLocation("a_texCoord");
this.gl.enableVertexAttribArray(texCoordLocation);
this.gl.vertexAttribPointer(texCoordLocation, 2, this.gl.FLOAT, false, 0, 0);
this.gl.activeTexture(this.gl.TEXTURE0);
this.gl.bindTexture(this.gl.TEXTURE_2D, this.texBuffer);
shader.getUniformInt("sampler").set(0);
this.gl.drawArrays(this.gl.TRIANGLES, 0, this.elements);
this.gl.disableVertexAttribArray(positionLocation);
//disable texture vertex attrib array
this.gl.disableVertexAttribArray(texCoordLocation);
}
}
/**
* Class representing a 4x4 Matrix
*/
class Matrix {
constructor(mat) {
this.data = new Float32Array(16);
for (let row = 0; row < 4; row++) {
for (let col = 0; col < 4; col++) {
this.data[row * 4 + col] = mat[col * 4 + row];
}
}
}
getVal(row, col) {
return this.data[col * 4 + row];
}
setVal(row, col, val) {
this.data[col * 4 + row] = val;
}
static translation(translation) {
let m = Matrix.identity();
m.setVal(0, 3, translation.x);
m.setVal(1, 3, translation.y);
m.setVal(2, 3, translation.z);
return m;
}
static rotation(axis, angle) {
let m = Matrix.identity()
let sin = Math.sin(angle);
let cos = Math.cos(angle);
if (axis.x != 0) {
m.setVal(1, 1, cos);
m.setVal(1, 2, -sin);
m.setVal(2, 1, sin);
m.setVal(2, 2, cos);
} else if (axis.y != 0) {
m.setVal(0, 0, cos);
m.setVal(0, 2, sin);
m.setVal(2, 0, -sin);
m.setVal(2, 2, cos);
} else {
m.setVal(0, 0, cos);
m.setVal(0, 1, -sin);
m.setVal(1, 0, sin);
m.setVal(1, 1, cos);
}
return m;
}
static scaling(scale) {
let m = Matrix.identity();
m.setVal(0, 0, scale.x);
m.setVal(1, 1, scale.y);
m.setVal(2, 2, scale.z);
return m;
}
/**
* Constructs a lookat matrix
* @param {Vector} eye - The position of the viewer
* @param {Vector} center - The position to look at
* @param {Vector} up - The up direction
* @return {Matrix} The resulting lookat matrix
*/
static lookat(eye, center, up) {
let fBig = center.sub(eye);
// Vector from the eye to the center point
let f = fBig.normalised();
// Up vector
let upNorm = up.normalised();
// Cross product
let s = f.cross(upNorm);
let u = s.normalised().cross(f);
// s, u and f are the basis vectors of the camera coordinate system
// Lookat matrix; the upper 3x3 part holds rotation and scaling
let mat = new Matrix([
s.x, s.y, s.z, 0,
u.x, u.y, u.z, 0,
-f.x, -f.y, -f.z, 0,
0, 0, 0, 1
]);
// Finally, translate by the negated eye position
let trans = Matrix.translation(eye.mul(-1));
mat = mat.mul(trans);
return mat;
}
static frustum(left, right, bottom, top, near, far) {
// TODO [exercise 9]
const n2 = 2 * near;
const rpl = right + left;
const rml = right - left;
const tpb = top + bottom;
const tmb = top - bottom;
const fpn = far + near;
const fmn = far - near;
const n2f = n2 * far;
return new Matrix([
n2 / rml, 0, rpl / rml, 0,
0, n2 / tmb, tpb / tmb, 0,
0, 0, -fpn / fmn, -n2f / fmn,
0, 0, -1, 0
]);
}
static perspective(fovy, aspect, near, far) {
// use the frustum method (lecture slides, set 10)
const top = near * Math.tan((Math.PI / 180) * (fovy / 2));
const bottom = -top;
const right = top * aspect;
const left = -right;
return Matrix.frustum(left, right, bottom, top, near, far);
}
/**
* Returns the identity matrix
*/
static identity() {
return new Matrix([
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
]);
}
mul(other) {
if (other instanceof Matrix) {
// [exercise 7]
let m = Matrix.identity();
for (let row = 0; row < 4; row++) {
for (let col = 0; col < 4; col++) {
let sum = 0;
for (let i = 0; i < 4; i++) {
sum += this.getVal(row, i) * other.getVal(i, col);
}
m.setVal(row, col, sum);
}
}
return m;
} else {
let v = [0, 0, 0, 0];
for (let row = 0; row < 4; row++) {
for (let i = 0; i < 4; i++) {
v[row] += this.getVal(row, i) * other.valueOf()[i];
}
}
return new Vector(v[0], v[1], v[2], v[3]);
}
}
transpose() {
let m = Matrix.identity();
for (let row = 0; row < 4; row++) {
for (let col = 0; col < 4; col++) {
m.setVal(row, col, this.getVal(col, row));
}
}
return m;
}
invert() {
let mat = this.data;
let dst = new Float32Array(16); //ret.getValues();
let tmp = new Float32Array(12);
/* temparray for pairs */
let src = new Float32Array(16); //new float[16];
/* array of transpose source matrix */
let det;
for (let i = 0; i < 4; i++) {
src[i] = mat[i * 4];
src[i + 4] = mat[i * 4 + 1];
src[i + 8] = mat[i * 4 + 2];
src[i + 12] = mat[i * 4 + 3];
}
tmp[0] = src[10] * src[15];
tmp[1] = src[11] * src[14];
tmp[2] = src[9] * src[15];
tmp[3] = src[11] * src[13];
tmp[4] = src[9] * src[14];
tmp[5] = src[10] * src[13];
tmp[6] = src[8] * src[15];
tmp[7] = src[11] * src[12];
tmp[8] = src[8] * src[14];
tmp[9] = src[10] * src[12];
tmp[10] = src[8] * src[13];
tmp[11] = src[9] * src[12];
dst[0] = tmp[0] * src[5] + tmp[3] * src[6] + tmp[4] * src[7];
dst[0] -= tmp[1] * src[5] + tmp[2] * src[6] + tmp[5] * src[7];
dst[1] = tmp[1] * src[4] + tmp[6] * src[6] + tmp[9] * src[7];
dst[1] -= tmp[0] * src[4] + tmp[7] * src[6] + tmp[8] * src[7];
dst[2] = tmp[2] * src[4] + tmp[7] * src[5] + tmp[10] * src[7];
dst[2] -= tmp[3] * src[4] + tmp[6] * src[5] + tmp[11] * src[7];
dst[3] = tmp[5] * src[4] + tmp[8] * src[5] + tmp[11] * src[6];
dst[3] -= tmp[4] * src[4] + tmp[9] * src[5] + tmp[10] * src[6];
dst[4] = tmp[1] * src[1] + tmp[2] * src[2] + tmp[5] * src[3];
dst[4] -= tmp[0] * src[1] + tmp[3] * src[2] + tmp[4] * src[3];
dst[5] = tmp[0] * src[0] + tmp[7] * src[2] + tmp[8] * src[3];
dst[5] -= tmp[1] * src[0] + tmp[6] * src[2] + tmp[9] * src[3];
dst[6] = tmp[3] * src[0] + tmp[6] * src[1] + tmp[11] * src[3];
dst[6] -= tmp[2] * src[0] + tmp[7] * src[1] + tmp[10] * src[3];
dst[7] = tmp[4] * src[0] + tmp[9] * src[1] + tmp[10] * src[2];
dst[7] -= tmp[5] * src[0] + tmp[8] * src[1] + tmp[11] * src[2];
tmp[0] = src[2] * src[7];
tmp[1] = src[3] * src[6];
tmp[2] = src[1] * src[7];
tmp[3] = src[3] * src[5];
tmp[4] = src[1] * src[6];
tmp[5] = src[2] * src[5];
tmp[6] = src[0] * src[7];
tmp[7] = src[3] * src[4];
tmp[8] = src[0] * src[6];
tmp[9] = src[2] * src[4];
tmp[10] = src[0] * src[5];
tmp[11] = src[1] * src[4];
dst[8] = tmp[0] * src[13] + tmp[3] * src[14] + tmp[4] * src[15];
dst[8] -= tmp[1] * src[13] + tmp[2] * src[14] + tmp[5] * src[15];
dst[9] = tmp[1] * src[12] + tmp[6] * src[14] + tmp[9] * src[15];
dst[9] -= tmp[0] * src[12] + tmp[7] * src[14] + tmp[8] * src[15];
dst[10] = tmp[2] * src[12] + tmp[7] * src[13] + tmp[10] * src[15];
dst[10] -= tmp[3] * src[12] + tmp[6] * src[13] + tmp[11] * src[15];
dst[11] = tmp[5] * src[12] + tmp[8] * src[13] + tmp[11] * src[14];
dst[11] -= tmp[4] * src[12] + tmp[9] * src[13] + tmp[10] * src[14];
dst[12] = tmp[2] * src[10] + tmp[5] * src[11] + tmp[1] * src[9];
dst[12] -= tmp[4] * src[11] + tmp[0] * src[9] + tmp[3] * src[10];
dst[13] = tmp[8] * src[11] + tmp[0] * src[8] + tmp[7] * src[10];
dst[13] -= tmp[6] * src[10] + tmp[9] * src[11] + tmp[1] * src[8];
dst[14] = tmp[6] * src[9] + tmp[11] * src[11] + tmp[3] * src[8];
dst[14] -= tmp[10] * src[11] + tmp[2] * src[8] + tmp[7] * src[9];
dst[15] = tmp[10] * src[10] + tmp[4] * src[8] + tmp[9] * src[9];
dst[15] -= tmp[8] * src[9] + tmp[11] * src[10] + tmp[5] * src[8];
det = src[0] * dst[0] + src[1] * dst[1] + src[2] * dst[2] + src[3] * dst[3];
if (det == 0.0) {
throw new Error("singular matrix is not invertible");
}
/* calculate matrix inverse */
det = 1 / det;
for (let j = 0; j < 16; j++) {
dst[j] *= det;
}
let ret = Matrix.identity();
ret.data = dst;
return ret;
}
}
/**
* Class representing a vector in 4D space
*/
class Vector {
/**
* Create a vector
* @param {number} x - The x component
* @param {number} y - The y component
* @param {number} z - The z component
* @param {number} w - The w component
*/
constructor(x, y, z, w) {
this.data = [x, y, z, w];
}
//has getter and setter
add(other) {
return new Vector(
this.x + other.x,
this.y + other.y,
this.z + other.z,
this.w + other.w
);
}
sub(other) {
return new Vector(
this.x - other.x,
this.y - other.y,
this.z - other.z,
this.w - other.w
);
}
mul(other) {
return new Vector(
this.x * other,
this.y * other,
this.z * other,
this.w
);
}
div(other) {
return new Vector(
this.x / other,
this.y / other,
this.z / other,
this.w
);
}
dot(other) {
if (other instanceof Vector) {
return this.x * other.x + this.y * other.y + this.z * other.z;
} else {
throw new Error("Dot product only works with vectors!");
}
}
cross(other) {
if (other instanceof Vector) {
return new Vector(
this.y * other.z - this.z * other.y,
this.z * other.x - this.x * other.z,
this.x * other.y - this.y * other.x,
0
);
} else {
throw new Error("Dot product only works with vectors!");
}
}
valueOf() {
return this.data;
}
normalised() {
const l = this.length;
return this.div(l);
}
equals(other) {
return (
Math.abs(this.x - other.x) <= Number.EPSILON &&
Math.abs(this.y - other.y) <= Number.EPSILON &&
Math.abs(this.z - other.z) <= Number.EPSILON &&
((!this.w && !other.w) || Math.abs(this.w - other.w) <= Number.EPSILON)
);
}
get length() {
return Math.sqrt(this.x * this.x + this.y * this.y + this.z * this.z);
}
}
/**
* Class representing a Node in a Scenegraph
*/
class Node {
/**
* Accepts a visitor according to the visitor pattern
* @param {Visitor} visitor - The visitor
*/
accept(visitor) { }
}
/**
* Class representing a GroupNode in the Scenegraph.
* A GroupNode holds a transformation and is able
* to have child nodes attached to it.
* @extends Node
*/
class GroupNode extends Node {
/**
* Constructor
* @param {Matrix} mat - A matrix describing the node's transformation
*/
constructor(mat) {
super();
this.matrix = mat;
// TODO [exercise 8]
this.children = [];
}
/**
* Accepts a visitor according to the visitor pattern
* @param {Visitor} visitor - The visitor
*/
accept(visitor) {
// TODO [exercise 8]
visitor.visitGroupNode(this);
}
/**
* Adds a child node
* @param {Node} childNode - The child node to add
*/
add(childNode) {
// TODO [exercise 8]
this.children.push(childNode);
}
}
/**
* Class representing a Textured Axis Aligned Box in the Scenegraph
* @extends Node
*/
class TextureBoxNode extends Node {
constructor(minPoint, maxPoint, texture) {
super();
this.minPoint = minPoint;
this.maxPoint = maxPoint;
this.texture = texture;
}
accept(visitor) {
// TODO [exercise 8]
visitor.visitTextureBoxNode(this);
}
}
//Texture Fragment Shader
precision mediump float;
uniform sampler2D sampler;
varying vec2 v_texCoord;
void main( void ) {
//gl_FragColor = vec4( 0.0, 0.0, 0.5, 1.0 );
// Read fragment color from texture
// TODO [exercise 9]
gl_FragColor = texture2D(sampler, vec2(v_texCoord.s, v_texCoord.t));
}
//Texture Vertex Shader
attribute vec3 a_position;
attribute vec2 a_texCoord;
varying vec2 v_texCoord;
uniform mat4 M;
uniform mat4 V;
uniform mat4 P;
void main() {
gl_Position = P * V * M * vec4( a_position, 1.0 );
v_texCoord = a_texCoord;
}
// Phong Vertex Shader
attribute vec3 a_position;
attribute vec3 a_normal;
// Pass color as attribute and forward it
// to the fragment shader
attribute vec4 a_color;
uniform mat4 M;
uniform mat4 V;
uniform mat4 P;
uniform mat4 N; // normal matrix
varying vec3 v_normal;
// Pass the vertex position in view space
// to the fragment shader
// TODO [exercise 9]
varying vec4 v_position;
varying vec4 v_color;
void main() {
gl_Position = P * V * M * vec4( a_position, 1.0 );
// Pass the color and transformed vertex position through
v_position = gl_Position;
v_color = a_color;
v_normal = (N * vec4(a_normal, 0)).xyz;
}
//Phong Fragment Shader
//precision mediump float;
// TODO [exercise 5]
//void main( void ) {
//gl_FragColor = vec4( 0.0, 0.0, 0.5, 1.0 );
// TODO [exercise 5]
//}
// Executed at least once per pixel
precision mediump float;
// TODO [exercise 5]
varying vec4 v_color;
varying vec4 v_position;
varying vec3 v_normal;
const vec3 lightPos = vec3(0.2,-1.0,-1.0);
const float shininess = 16.0;
const float k_a = 1.0;
const float k_d = 0.6;
const float k_s = 0.3;
// Pass the color through from the vertex shader and interpolate it
void main( void ) {
// Red, green, blue, alpha
//gl_FragColor = vec4( 0.0, 0.0, 0.5, 1.0 );
// TODO [exercise 5]
vec3 vertPos = vec3(v_position) / v_position.w;
vec3 N = normalize(v_normal);
vec3 L = normalize(lightPos - vertPos);
vec4 L_j = vec4(1,1,1,1);
vec4 diffuse = L_j * max(dot(N, L), 0.0);
vec3 R = reflect(-L, N);
vec3 V = normalize(-vertPos);
float specAngle = max(dot(R, V), 0.0);
vec4 specular = L_j * pow(specAngle, shininess);
vec4 color = vec4(k_a * v_color + k_d * diffuse + k_s * specular);
gl_FragColor = color;
}
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>ICG-11 Animation</title>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css" integrity="sha384-BVYiiSIFeK1dGmJRAkycuHAHRg32OmUcww7on3RYdg4Va+PmSTsz/K68vbdEjh4u"
crossorigin="anonymous">
</head>
<body>
<div class="container text-center">
<h1>ICG Animation</h1>
<hr>
<p>Implement a Rasteriser with WebGL using a Scenegraph.</p>
<canvas id="rasteriser" width="500" height="500"></canvas>
<script src="vector.js"></script>
<script src="raster-texture-box.js"></script>
<script src="matrix.js"></script>
<script src="nodes.js"></script>
<script src="rastervisitor.js"></script>
<script src="shader.js"></script>
<script src="animation-nodes.js"></script>
<script>
const canvas = document.getElementById("rasteriser");
const gl = canvas.getContext("webgl");
// construct scene graph
const sg = new GroupNode(Matrix.scaling(new Vector(0.2, 0.2, 0.2)));
const gn1 = new GroupNode(Matrix.translation(new Vector(1, 1, 0)));
sg.add(gn1);
let gn2 = new GroupNode(Matrix.translation(new Vector(-.7, -0.4, .1)));
sg.add(gn2);
const cube = new TextureBoxNode(
new Vector(-1, -1, -1, 1),
new Vector(1, 1, 1, 1),
'diffuse.png'
);
gn2.add(cube);
// setup for rendering
const setupVisitor = new RasterSetupVisitor(gl);
setupVisitor.setup(sg);
const visitor = new RasterVisitor(gl);
let camera = {
eye: new Vector(-.5, .5, -1, 1),
center: new Vector(0, 0, 0, 1),
up: new Vector(0, 1, 0, 0),
fovy: 60,
aspect: canvas.width / canvas.height,
near: 0.1,
far: 100
};
const phongShader = new Shader(gl,
"phong-vertex-perspective-shader.glsl",
"phong-fragment-shader.glsl"
);
visitor.shader = phongShader;
const textureShader = new Shader(gl,
"texture-vertex-perspective-shader.glsl",
"texture-fragment-shader.glsl"
);
visitor.textureshader = textureShader;
let animationNodes = [
new RotationNode(gn2, new Vector(0, 0, 1))
];
function simulate(deltaT) {
for (const animationNode of animationNodes) {
animationNode.simulate(deltaT);
}
}
let lastTimestamp = performance.now();
function animate(timestamp) {
simulate(timestamp - lastTimestamp);
visitor.render(sg, camera);
lastTimestamp = timestamp;
window.requestAnimationFrame(animate);
}
Promise.all(
[textureShader.load(), phongShader.load()]
).then(x =>
window.requestAnimationFrame(animate)
);
</script>
</div>
</body>
</html>
Hey there, I've been trying for a while now to add a second texture to my cube and do some bump mapping. But I am a programming beginner, so it's kinda hard for me. All my maths for the matrix and vector are in the .js files of the same name. I also have two kinds of shaders, the texture shader and the phong shader. Now everyone says I have to calculate my normals, but how do I do that? And where? Looking forward to your help!

With a normal map, bump mapping can be performed as asked in the question. In bump mapping, the normal vector of a fragment is read from the normal map and used for the light calculations.
In general, the incident light vector is transformed into texture space; this space encodes the "orientation" of the normal map on the object (fragment). To set up the 3x3 orientation matrix that describes this orientation, the tangent vector and the bi-tangent vector as well as the normal vector must be known. If no tangent and bi-tangent vectors are available, they can be approximated from the partial derivatives of the vertex position and the texture coordinate in the fragment shader.
So at least the texture coordinate and the normal vector attribute are required. In the fragment shader the calculations are done in world space and texture space, respectively. The vertex shader is straightforward:
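A minimal sketch of such a vertex shader, reusing the attribute and uniform names from the question's shaders (a_position, a_normal, a_texCoord, M, V, P, N) and forwarding the world space position, the normal and the texture coordinate; the varying names are assumptions:
// Bump mapping vertex shader (sketch)
attribute vec3 a_position;
attribute vec3 a_normal;
attribute vec2 a_texCoord;
uniform mat4 M; // model matrix
uniform mat4 V; // view matrix
uniform mat4 P; // projection matrix
uniform mat4 N; // normal matrix, assumed to take normals to world space
varying vec3 v_worldPos;
varying vec3 v_normal;
varying vec2 v_texCoord;
void main() {
    vec4 worldPos = M * vec4(a_position, 1.0);
    v_worldPos = worldPos.xyz;
    v_normal = (N * vec4(a_normal, 0.0)).xyz;
    v_texCoord = a_texCoord;
    gl_Position = P * V * worldPos;
}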
In the fragment shader, the normal vector is read from the normal map:
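For example (a sketch; u_normalMap is an assumed second sampler uniform, and the stored colour range [0, 1] is expanded back to the vector range [-1, 1]):
// Read the tangent space normal from the normal map
vec3 mapNormal = normalize(texture2D(u_normalMap, v_texCoord).rgb * 2.0 - 1.0);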
The light vector is transformed to texture space:
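For example, with tbn_inv being the matrix that maps from world space to texture space (set up further below) and u_lightPos an assumed world space light position uniform:
// Direction from the fragment to the light, transformed into texture space
vec3 L_ts = normalize(tbn_inv * normalize(u_lightPos - v_worldPos));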
With this vector the light calculations can be performed:
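A sketch of a simple diffuse term evaluated entirely in texture space, analogous to the question's Phong fragment shader (k_a and k_d are assumed constants, sampler is the question's diffuse texture):
// Lambertian diffuse lighting with the normal from the normal map
float diff = max(dot(mapNormal, L_ts), 0.0);
vec3 baseColor = texture2D(sampler, v_texCoord).rgb;
gl_FragColor = vec4((k_a + k_d * diff) * baseColor, 1.0);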
To calculate the matrix which transforms from world space to texture space, the partial derivative functions (dFdx, dFdy) are required. This means the "OES_standard_derivatives" extension has to be enabled (or a "webgl2" context has to be used). The algorithm to calculate the tangent vector and binormal vector is explained in another answer - How to calculate Tangent and Binormal?
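A sketch of such a co-tangent frame computation in the fragment shader; the extension is enabled on the JavaScript side with gl.getExtension("OES_standard_derivatives"), and the normalisation here is only approximate:
#extension GL_OES_standard_derivatives : enable
// Approximate tangent (T) and bi-tangent (B) from the screen space
// derivatives of the world position and the texture coordinate
vec3 N_face = normalize(v_normal);
vec3 dp1 = dFdx(v_worldPos);
vec3 dp2 = dFdy(v_worldPos);
vec2 duv1 = dFdx(v_texCoord);
vec2 duv2 = dFdy(v_texCoord);
vec3 dp2perp = cross(dp2, N_face);
vec3 dp1perp = cross(N_face, dp1);
vec3 T = normalize(dp2perp * duv1.x + dp1perp * duv2.x);
vec3 B = normalize(dp2perp * duv1.y + dp1perp * duv2.y);
// world space -> texture space: transpose of the (roughly orthonormal) TBN basis
mat3 tbn_inv = mat3(
    vec3(T.x, B.x, N_face.x),
    vec3(T.y, B.y, N_face.y),
    vec3(T.z, B.z, N_face.z));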
Final fragment shader:
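A self-contained sketch of how such a fragment shader could look for the question's setup; u_normalMap, u_lightPos and the constants k_a, k_d are assumptions, while sampler matches the question's diffuse texture uniform:
#extension GL_OES_standard_derivatives : enable
precision mediump float;
uniform sampler2D sampler;     // diffuse texture (as in the question)
uniform sampler2D u_normalMap; // assumed additional normal map
uniform vec3 u_lightPos;       // assumed world space light position
varying vec3 v_worldPos;
varying vec3 v_normal;
varying vec2 v_texCoord;
const float k_a = 0.3;
const float k_d = 0.7;
// world space -> texture space matrix, approximated from derivatives
mat3 worldToTexture(vec3 N, vec3 p, vec2 uv) {
    vec3 dp1 = dFdx(p);
    vec3 dp2 = dFdy(p);
    vec2 duv1 = dFdx(uv);
    vec2 duv2 = dFdy(uv);
    vec3 dp2perp = cross(dp2, N);
    vec3 dp1perp = cross(N, dp1);
    vec3 T = normalize(dp2perp * duv1.x + dp1perp * duv2.x);
    vec3 B = normalize(dp2perp * duv1.y + dp1perp * duv2.y);
    return mat3(
        vec3(T.x, B.x, N.x),
        vec3(T.y, B.y, N.y),
        vec3(T.z, B.z, N.z));
}
void main(void) {
    vec3 N = normalize(v_normal);
    mat3 tbn_inv = worldToTexture(N, v_worldPos, v_texCoord);
    // normal from the normal map, already in texture space
    vec3 mapNormal = normalize(texture2D(u_normalMap, v_texCoord).rgb * 2.0 - 1.0);
    // light direction, transformed into texture space
    vec3 L_ts = normalize(tbn_inv * normalize(u_lightPos - v_worldPos));
    float diff = max(dot(mapNormal, L_ts), 0.0);
    vec3 baseColor = texture2D(sampler, v_texCoord).rgb;
    gl_FragColor = vec4((k_a + k_d * diff) * baseColor, 1.0);
}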
This will produce a bump-mapped result.
If the tangent vector is known, the calculation of the inverse TBN matrix (tbn_inv) can be simplified considerably:
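For example, assuming the tangent is passed in as an additional vertex attribute and handed on to the fragment shader as a v_tangent varying (neither is part of the question's code):
// Re-orthogonalize the tangent against the normal and derive the bi-tangent
vec3 N = normalize(v_normal);
vec3 T = normalize(v_tangent - dot(v_tangent, N) * N);
vec3 B = cross(N, T);
// world space -> texture space: transpose of the orthonormal TBN basis
mat3 tbn_inv = mat3(
    vec3(T.x, B.x, N.x),
    vec3(T.y, B.y, N.y),
    vec3(T.z, B.z, N.z));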
If you want parallax mapping like in this example, then a displacement map is required, too.
The white areas on this map are pushed "into" the object. The algorithm is described in detail at LearnOpenGL - Parallax Mapping.
The idea is that each fragment is associated with a height from the displacement map, which can be imagined as a rectangular pillar standing on the fragment. The view ray is traced until it hits a displaced fragment.
For a high-performance algorithm, samples are taken from the displacement texture. When a displaced fragment is identified, the corresponding texels of the normal map and the diffuse texture are read. This gives the geometry a 3-dimensional look. Note that this algorithm is not able to handle silhouettes.
Final fragment shader with steep parallax mapping:
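A sketch of the stepping loop only; u_displacementMap, u_parallaxScale and u_cameraPos are assumed uniforms, tbn_inv is the world-to-texture matrix from above, and after the loop the normal map and the diffuse texture are sampled at the displaced coordinate and lit as before:
// Steep parallax mapping: march along the view ray in texture space and
// sample the displacement (depth) map until the ray dips below the surface
vec3 V_ts = normalize(tbn_inv * normalize(u_cameraPos - v_worldPos));
const float numSteps = 32.0;
float layerDepth = 1.0 / numSteps;
vec2 deltaUV = V_ts.xy / V_ts.z * u_parallaxScale / numSteps;
vec2 uv = v_texCoord;
float currentDepth = 0.0;
float mapDepth = texture2D(u_displacementMap, uv).r;
for (int i = 0; i < 32; ++i) {
    if (currentDepth >= mapDepth)
        break;
    uv -= deltaUV;
    mapDepth = texture2D(u_displacementMap, uv).r;
    currentDepth += layerDepth;
}
// uv now addresses the displaced fragment; read the normal map and the
// diffuse texture at this coordinate and do the lighting as shown above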
The result is much more impressive.