Three.js audio reactive shader page single page PWA - audio

Why is the canvas not drawing?
I've tried adjusting the three.js syntax, but no results yet.
I'm a bit lost as to what might need to be different, although I have highlighted some dodgy-looking punctuation in the functions. I'm used to GLSL rather than JavaScript.
This is the part that is not working correctly:
var shaderMaterial = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: `
void main() {
gl_Position = vec4( position, 1.0 );
}
`,
fragmentShader:
uniform vec2 iResolution;
uniform float iGlobalTime;
uniform float iChannel0[32];
uniform float speed;
vec2 rotate(vec2 v, float a) {
return vec2(cos(a)*v.x-sin(a)*v.y, sin(a)*v.x+cos(a)*v.y);
}
void main() {
vec2 uv = gl_FragCoord.xy / iResolution.xy - 0.5;
uv = rotate(uv, iGlobalTime * 0.1);
uv *= 1.0 + 0.5 * iChannel0[0] * speed;
float v = sin(uv.x * 10.0) * 0.5 + 0.5;
v = pow(v, 2.0);
gl_FragColor = vec4( vec3( v ), 1.0 );
}
});
// Render loop
function render() {
requestAnimationFrame(render);
uniforms.iGlobalTime.value += 0.05;
uniforms.speed.value = 1 + 0.5 * analyser.getAverageFrequency() / 256;
renderer.render(scene, camera);
}
render();
})
.catch(function (error) {
console.error(error);
});
</script>
</body>
</html>
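The dodgy punctuation is most likely the whole problem: fragmentShader is assigned raw GLSL with no surrounding backticks, so the file isn't valid JavaScript and fails with a syntax error before anything can draw. A minimal corrected sketch (shader body trimmed; uniforms and the rest of the setup are assumed unchanged):
var shaderMaterial = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: `
void main() {
gl_Position = vec4( position, 1.0 );
}
`,
// the GLSL must be wrapped in backticks (a template literal) just like the vertex shader
fragmentShader: `
uniform vec2 iResolution;
uniform float iGlobalTime;
void main() {
vec2 uv = gl_FragCoord.xy / iResolution.xy - 0.5;
float v = sin(uv.x * 10.0 + iGlobalTime) * 0.5 + 0.5;
gl_FragColor = vec4( vec3( v ), 1.0 );
}
`
});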

Related

Plotting a discrete-time signal shows amplitude modulation

I'm trying to render a simple discrete-time signal using a canvas element. However, the representation seems to be inaccurate. As you can see in the code snippet, the signal appears to be amplitude-modulated once the frequency reaches a certain threshold, even though it's well below the Nyquist limit of <50 Hz (assuming a sampling rate of 100 Hz in this example).
For very low frequencies like 5Hz it looks perfectly fine.
How would I go about rendering this properly? And does it work for more complex signals (say, the waveform of a song)?
window.addEventListener('load', () => {
const canvas = document.querySelector('canvas');
const frequencyElem = document.querySelector('#frequency');
const ctx = canvas.getContext('2d');
const renderFn = t => {
const signal = new Array(100);
const sineOfT = Math.sin(t / 1000 / 8 * Math.PI * 2) * 0.5 + 0.5;
const frequency = sineOfT * 20 + 3;
for (let i = 0; i < signal.length; i++) {
signal[i] = Math.sin(i / signal.length * Math.PI * 2 * frequency);
}
frequencyElem.innerText = `${frequency.toFixed(3)}Hz`;
render(ctx, signal);
requestAnimationFrame(renderFn);
};
requestAnimationFrame(renderFn);
});
function render(ctx, signal) {
const w = ctx.canvas.width;
const h = ctx.canvas.height;
ctx.clearRect(0, 0, w, h);
ctx.strokeStyle = 'red';
ctx.beginPath();
signal.forEach((value, i) => {
const x = i / (signal.length - 1) * w;
const y = h - (value + 1) / 2 * h;
if (i === 0) {
ctx.moveTo(x, y);
} else {
ctx.lineTo(x, y);
}
});
ctx.stroke();
}
@media (prefers-color-scheme: dark) {
body {
background-color: #333;
color: #f6f6f6;
}
}
<canvas></canvas>
<br/>
Frequency: <span id="frequency"></span>
It looks right to me. At higher frequencies, when the peak falls between two samples, the sampled points can be a lot lower than the peak.
If the signal only has frequencies < Nyquist, then the signal can be reconstructed from its samples. That doesn't mean that the samples look like the signal.
As long as your signal is oversampled by 2x or more (or so), you can draw it pretty accurately by using cubic interpolation between the sample points. See, for example, the Catmull-Rom section of https://en.wikipedia.org/wiki/Cubic_Hermite_spline
You can use the bezierCurveTo method in HTML Canvas to draw these interpolated curves. If you need to use lines, then you should find any maximum or minimum points that occur between samples and include those in your path.
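For reference, the standard conversion from a uniform Catmull-Rom segment to a cubic Bezier: for the span from p1 to p2 with neighbours p0 and p3, the inner Bezier control points are c1 = p1 + (p2 - p0)/6 and c2 = p2 - (p3 - p1)/6. The snippet below parameterizes that factor as a tension constant c (c = 1/3 reproduces the standard 1/6 weights exactly; the c = 1/2 used here gives a slightly looser curve).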
I've edited your snippet to use the bezierCurveTo method with Catmull-Rom interpolation below:
window.addEventListener('load', () => {
const canvas = document.querySelector('canvas');
const frequencyElem = document.querySelector('#frequency');
const ctx = canvas.getContext('2d');
const renderFn = t => {
const signal = new Array(100);
const sineOfT = Math.sin(t / 1000 / 8 * Math.PI * 2) * 0.5 + 0.5;
const frequency = sineOfT * 20 + 3;
for (let i = 0; i < signal.length; i++) {
signal[i] = Math.sin(i / signal.length * Math.PI * 2 * frequency);
}
frequencyElem.innerText = `${frequency.toFixed(3)}Hz`;
render(ctx, signal);
requestAnimationFrame(renderFn);
};
requestAnimationFrame(renderFn);
});
function render(ctx, signal) {
const w = ctx.canvas.width;
const h = ctx.canvas.height;
ctx.clearRect(0, 0, w, h);
ctx.strokeStyle = 'red';
ctx.beginPath();
const dx = w/(signal.length - 1);
const dy = -(h-2)/2.0;
const c = 1.0/2.0;
for (let i=0; i < signal.length-1; ++i) {
const x0 = i * dx;
const y0 = h*0.5 + signal[i]*dy;
const x3 = x0 + dx;
const y3 = h*0.5 + signal[i+1]*dy;
let x1,y1,x2,y2;
if (i>0) {
x1 = x0 + dx*c;
y1 = y0 + (signal[i+1] - signal[i-1])*dy*c/2;
} else {
x1 = x0;
y1 = y0;
ctx.moveTo(x0, y0);
}
if (i < signal.length-2) {
x2 = x3 - dx*c;
y2 = y3 - (signal[i+2] - signal[i])*dy*c/2;
} else {
x2 = x3;
y2 = y3;
}
ctx.bezierCurveTo(x1,y1,x2,y2,x3,y3);
}
ctx.stroke();
}
@media (prefers-color-scheme: dark) {
body {
background-color: #333;
color: #f6f6f6;
}
}
<canvas></canvas>
<br/>
Frequency: <span id="frequency"></span>

How to do static environment mapping in DirectX 11?

I have a .dds cube map texture loaded in and put on the sphere model. Obviously it doesn't look right, because I would have to map the texture to the right points on the sphere.
How can I map the texture to the right points on the sphere?
Something like this:
TextureCube TEXTURE_REFLECTION;
//...
VS_OUTPUT vs(VS_INPUT IN) {
//...
float4 worldPosition = mul(float4(IN.Position.xyz, 1.0f), IN.World);
OUT.worldpos = worldPosition.xyz;
//...
}
PS_OUTPUT ps(VS_OUTPUT IN) {
//...
float4 ColorTex = TEXTURE_DIFFUSE.Sample(SAMPLER_DEFAULT, IN.TexCoord);
float3 normalFromMap = mul(2.0f * TEXTURE_NORMAL.Sample(SAMPLER_DEFAULT, IN.TexCoord).xyz - 1.0f, IN.tangentToWorld);
//...
float3 incident = -normalize(CAMERA_POSITION - IN.worldpos);
float3 reflectionVector = reflect(incident, normalFromMap);
ColorTex.rgb = lerp(ColorTex.rgb, TEXTURE_REFLECTION.Sample(SAMPLER_DEFAULT, reflectionVector).rgb, MATERIAL_REFLECTIVITY);
}
I figured it out.
Pixel shader:
TextureCube CubeMap: register(t0);
SamplerState TexSampler : register(s0);
float4 main(LightingPixelShaderInput input) : SV_Target
{
float4 cubeTexture = CubeMap.Sample(TexSampler, input.worldNormal);
//light calculations
float3 finalColour = (gAmbientColour + diffuseLights) * cubeTexture.rgb +
(specularLights) * cubeTexture.a;
return float4(finalColour, 1.0f);
}
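For completeness, the pixel shader above relies on the vertex shader passing through a world-space normal (input.worldNormal). A minimal sketch of that stage, following the row-vector mul convention of the first snippet (the struct, cbuffer, and matrix names here are assumptions, not from the original):
cbuffer PerModelConstants : register(b0)
{
float4x4 gWorldMatrix;
float4x4 gViewMatrix;
float4x4 gProjectionMatrix;
};
struct BasicVertex
{
float3 position : position;
float3 normal : normal;
};
struct LightingPixelShaderInput
{
float4 projectedPosition : SV_Position;
float3 worldNormal : worldNormal;
};
LightingPixelShaderInput main(BasicVertex v)
{
LightingPixelShaderInput output;
float4 worldPosition = mul(float4(v.position, 1.0f), gWorldMatrix);
output.projectedPosition = mul(mul(worldPosition, gViewMatrix), gProjectionMatrix);
// world-space normal (assumes uniform scaling; otherwise use the inverse transpose)
output.worldNormal = normalize(mul(v.normal, (float3x3)gWorldMatrix));
return output;
}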

WebGL creating multiple objects?

So I am trying to create circles using the midpoint algorithm. I'm having trouble with how to handle buffers and basically getting WebGL properly set up. Using the console I can see that the algorithm is working fine and building the vertex array, but I need help understanding what to do with useProgram, createBuffer, and drawArrays. Where should I place them?
Also, should I concat the circles every time I call circle() in the START() function?
like: circle(blah blah).concat(circle(blah blah));
var vertexShaderText =
[
'precision mediump float;',
'',
'attribute vec2 vertPosition;',
'attribute vec3 vertColor;',
'varying vec3 fragColor;',
'',
'void main()',
'{',
' fragColor = vertColor;',
' gl_Position = vec4(vertPosition, 0.0, 1.0);',
'}'
].join('\n');
var fragmentShaderText =
[
'precision mediump float;',
'',
'varying vec3 fragColor;',
'void main()',
'{',
' gl_FragColor = vec4(fragColor, 1.0);',
'}'
].join('\n');
var START = function () {
console.log('This is working');
var canvas = document.getElementById('sky');
var gl = canvas.getContext('webgl');
if (!gl) {
console.log('WebGL not supported, falling back on experimental-webgl');
gl = canvas.getContext('experimental-webgl');
}
if (!gl) {
alert('Your browser does not support WebGL');
}
gl.clearColor(.3, .3, .7, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Create shaders
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(vertexShader, vertexShaderText);
gl.shaderSource(fragmentShader, fragmentShaderText);
//create a program for the shaders
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
gl.useProgram(program);
var circle = function (xmid, ymid, r) {
var points = [];
var x = 0;
var y = r;
var pk = 5/4 - r;
while (x < y)
{
if (pk < 0)
{
x++;
pk += 2*x + 1;
}
else
{
x++;
y--;
pk += 2 * (x-y) + 1;
}
points.push(x+xmid, y+ymid);
points.push(x+xmid, -y+ymid);
points.push(-x+xmid, y+ymid);
points.push(-x+xmid, -y+ymid);
points.push(y+xmid, x+ymid);
points.push(y+xmid, -x+ymid);
points.push(-y+xmid, x+ymid);
points.push(-y+xmid, -x+ymid);
}
var cbuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, cbuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(points), gl.STATIC_DRAW);
gl.drawArrays(gl.POINTS, 0, points.length/2);
var positionAttribLocation = gl.getAttribLocation(program, 'vertPosition');
var colorAttribLocation = gl.getAttribLocation(program, 'vertColor');
gl.vertexAttribPointer(
positionAttribLocation, // Attribute location
2, // Number of elements per attribute
gl.FLOAT, // Type of elements
gl.FALSE,
5 * Float32Array.BYTES_PER_ELEMENT, // Size of an individual vertex
0 // Offset from the beginning of a single vertex to this attribute
);
gl.enableVertexAttribArray(positionAttribLocation);
gl.enableVertexAttribArray(colorAttribLocation);
return points;
}
circle(0.6, 0.6, 0.18);
circle(0.9, 0.6, 0.18);
circle(0.5, 0.4, 0.18);
circle(1.0, 0.4, 0.18);
circle(0.75, 0.4, 0.18);
circle(0.75, 0.4, 0.18);
}
START();
<canvas id="sky"></canvas>
This is what my console log is saying:
WebGL: INVALID_OPERATION: useProgram: program not valid (logged 6 times)
WebGL: INVALID_OPERATION: drawArrays: no valid shader program in use (logged 6 times)
WebGL: INVALID_OPERATION: getAttribLocation: program not linked (logged 12 times)
You can clearly see that I am linking and using the program at the very beginning. So what gives?
There's more than one issue with the code:
1. The shaders are never compiled. After setting the shader source with gl.shaderSource you need to compile them with gl.compileShader. You should also check for compile errors by calling gl.getShaderParameter(shader, gl.COMPILE_STATUS), and check for errors after linking by calling gl.getProgramParameter(program, gl.LINK_STATUS).
2. gl.drawArrays is called before setting up the attributes.
3. The code enables 2 attributes but only supplies data for 1 attribute.
4. The code draws gl.POINTS but the vertex shader never sets gl_PointSize.
I also don't really understand your circle code but since I don't know what it's really trying to do I can't fix it.
And finally you should probably read some tutorials on WebGL
I'd also suggest you use multiline template literals for your shaders:
const vertexShaderText = `
precision mediump float;
attribute vec2 vertPosition;
attribute vec3 vertColor;
varying vec3 fragColor;
void main()
{
fragColor = vertColor;
gl_Position = vec4(vertPosition, 0.0, 1.0);
gl_PointSize = 5.;
}
`;
const fragmentShaderText = `
precision mediump float;
varying vec3 fragColor;
void main()
{
gl_FragColor = vec4(fragColor, 1.0);
}
`;
const start = function () {
console.log('This is working');
const canvas = document.getElementById('sky');
const gl = canvas.getContext('webgl');
if (!gl) {
alert('Your browser does not support WebGL');
return;
}
gl.clearColor(.3, .3, .7, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
//create a shader program
const program = createProgram(gl, vertexShaderText, fragmentShaderText);
gl.useProgram(program);
const circle = function (xmid, ymid, r) {
const points = [];
let x = 0;
let y = r;
let pk = 5/4 - r;
while (x < y)
{
if (pk < 0)
{
x++;
pk += 2*x + 1;
}
else
{
x++;
y--;
pk += 2 * (x-y) + 1;
}
points.push(x+xmid, y+ymid);
points.push(x+xmid, -y+ymid);
points.push(-x+xmid, y+ymid);
points.push(-x+xmid, -y+ymid);
points.push(y+xmid, x+ymid);
points.push(y+xmid, -x+ymid);
points.push(-y+xmid, x+ymid);
points.push(-y+xmid, -x+ymid);
}
const cbuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, cbuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(points), gl.STATIC_DRAW);
const positionAttribLocation = gl.getAttribLocation(program, 'vertPosition');
const colorAttribLocation = gl.getAttribLocation(program, 'vertColor');
gl.vertexAttribPointer(
positionAttribLocation, // Attribute location
2, // Number of elements per attribute
gl.FLOAT, // Type of elements
false, // whether to normalize
0, // stride (0 = tightly packed)
0 // Offset from the beginning of a single vertex to this attribute
);
gl.enableVertexAttribArray(positionAttribLocation);
// you probably meant to supply colors for this attribute
// since if you wanted a constant color you'd have probably
// used a uniform but since you didn't we'll set a constant
// color
gl.vertexAttrib4f(colorAttribLocation, 1, 0, 0, 1);
gl.drawArrays(gl.POINTS, 0, points.length/2);
return points;
}
circle(0.6, 0.6, 0.18);
circle(0.9, 0.6, 0.18);
circle(0.5, 0.4, 0.18);
circle(1.0, 0.4, 0.18);
circle(0.75, 0.4, 0.18);
circle(0.75, 0.4, 0.18);
}
function createProgram(gl, vertexShaderText, fragmentShaderText) {
// Create shaders
const vertexShader = createShader(gl, gl.VERTEX_SHADER, vertexShaderText);
const fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragmentShaderText);
const program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
console.error(gl.getProgramInfoLog(program));
gl.deleteProgram(program);
return null;
}
return program;
}
function createShader(gl, type, source) {
const shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
console.error(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return null;
}
return shader;
}
start();
<canvas id="sky"></canvas>
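A footnote on the circle code itself: the midpoint algorithm assumes integer pixel coordinates, but here it is fed clip-space values. With r = 0.18, pk starts at 5/4 - 0.18 > 0, so the first pass sets x = 1 and the while (x < y) test immediately fails, leaving 8 stray points instead of a circle. A hypothetical replacement that generates ring points directly in clip space:
const circlePoints = function (xmid, ymid, r, segments) {
const points = [];
for (let i = 0; i < segments; i++) {
const angle = i / segments * Math.PI * 2;
points.push(xmid + r * Math.cos(angle), ymid + r * Math.sin(angle));
}
return points;
};
// usage: hand the result to gl.bufferData in place of the midpoint output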

Implementing a gradient shader in three.js

I am trying to learn about shaders using three.js. What I am trying to do is create a shader that generates gradients to texture planets with. Right now I am just trying to generate one gradient to make sure it works. However, when I apply the shader it only renders one of the colors, and does not create the gradient effect I'm looking for. I can't seem to find where I'm going wrong with my code.
I'm using the Book of Shaders as the basis for my code. Specifically, I was looking at this example, trying to replicate the background color.
Here is my shader code:
<section id="fragmentshader">
#ifdef GL_ES
precision mediump float;
#endif
// #define PI 3.14159265359
uniform vec2 u_resolution;
// uniform vec2 u_mouse;
// uniform float u_time;
vec3 colorA = vec3(0.500,0.141,0.912);
vec3 colorB = vec3(1.000,0.833,0.224);
void main() {
vec2 st = gl_FragCoord.xy/u_resolution.xy;
vec3 color = vec3(0.0);
color = mix( colorA,
colorB,
st.y);
gl_FragColor = vec4(color,1.0);
}
</section>
<section id="vertexshader">
void main() {
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</section>
and my three.js code inside an A-Frame component:
var uniforms = {
u_resolution: { type: "v2", value: new THREE.Vector2() },
};
var fShader = $('#fragmentshader');
var vShader = $('#vertexshader');
var geometry = new THREE.SphereGeometry(getRandomInt(100, 250), 20, 20);
// var material = new THREE.MeshBasicMaterial( {wireframe: true });
var material = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: vShader.text(),
fragmentShader: fShader.text()
});
var sphere = new THREE.Mesh(geometry, material);
This is what my spheres currently look like
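An observation on the original attempt: u_resolution is initialized to new THREE.Vector2(), i.e. (0, 0), and never updated, so st = gl_FragCoord.xy / u_resolution.xy divides by zero and the mix factor st.y is not a usable gradient coordinate, which would explain the single flat color. The working example below avoids the resolution uniform altogether: it passes the geometry's uv to the fragment shader through a varying and mixes two color uniforms with it.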
var camera, scene, renderer, mesh, material;
init();
animate();
function init() {
// Renderer.
renderer = new THREE.WebGLRenderer();
//renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
// Add renderer to page
document.body.appendChild(renderer.domElement);
// Create camera.
camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 1, 1000);
camera.position.z = 400;
// Create scene.
scene = new THREE.Scene();
var uniforms = {
"color1" : {
type : "c",
value : new THREE.Color(0xffffff)
},
"color2" : {
type : "c",
value : new THREE.Color(0x000000)
},
};
var fShader = document.getElementById('fragmentShader').text;
var vShader = document.getElementById('vertexShader').text;
// Create material
var material = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: vShader,
fragmentShader: fShader
});
// Create cube and add to scene.
var geometry = new THREE.BoxGeometry(200, 200, 200);
mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
// Create ambient light and add to scene.
var light = new THREE.AmbientLight(0x404040); // soft white light
scene.add(light);
// Create directional light and add to scene.
var directionalLight = new THREE.DirectionalLight(0xffffff);
directionalLight.position.set(1, 1, 1).normalize();
scene.add(directionalLight);
// Add listener for window resize.
window.addEventListener('resize', onWindowResize, false);
}
function animate() {
requestAnimationFrame(animate);
mesh.rotation.x += 0.005;
mesh.rotation.y += 0.01;
renderer.render(scene, camera);
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
}
<script src="https://rawgit.com/mrdoob/three.js/r86/build/three.min.js"></script>
<script id="vertexShader" type="x-shader/x-vertex">
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
}
</script>
<script id="fragmentShader" type="x-shader/x-fragment">
uniform vec3 color1;
uniform vec3 color2;
varying vec2 vUv;
void main() {
gl_FragColor = vec4(mix(color1, color2, vUv.y),1.0);
}
</script>
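If you'd rather keep the question's Book of Shaders style gl_FragCoord / u_resolution approach, the original fragment shader should also work once the uniform is actually filled in and kept in sync with the drawing buffer, e.g. (a sketch against the question's own uniforms object):
uniforms.u_resolution.value.set(renderer.domElement.width, renderer.domElement.height);
// call this after renderer.setSize(...) and again in the resize handler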

draw a texture pixelated

I have a bitmap that I want to draw zoomed in and with straight, defined edges between pixels.
I have tried setting the MAG filter to NEAREST:
gl.bindTexture(gl.TEXTURE_2D,this.tex);
gl.texParameteri(gl.TEXTURE_2D,gl.TEXTURE_MIN_FILTER,gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D,gl.TEXTURE_MAG_FILTER,gl.NEAREST);
...
gl.drawArrays(gl.TRIANGLES,0,6); // draw the texture
However, when I draw it the pixels bleed together:
This is WebGL, so I have shaders. My shader code is super simple:
Vertex shader:
...
attribute vec2 texCoord;
varying vec2 texel;
void main() {
texel = texCoord;
...
And fragment shader:
...
varying vec2 texel;
uniform sampler2D texture;
void main() {
vec4 fragColour = texture2D(texture,texel);
...
Here's a larger snippet of the relevant code:
map = {
vbo: gl.createBuffer(),
tex: gl.createTexture(),
program: createProgram(
"precision mediump float;\n"+
"uniform mat4 mvMatrix, pMatrix;\n"+
"attribute vec3 vertex;\n"+
"attribute vec2 texCoord;\n"+
"varying vec2 texel;\n"+
"void main() {\n"+
" texel = texCoord;\n"+
" gl_Position = pMatrix * mvMatrix * vec4(vertex,1.0);\n"+
"}\n",
"precision mediump float;\n"+
"uniform vec4 colour;\n"+
"uniform float fogDensity;\n"+
"uniform vec4 fogColour;\n"+
"varying vec2 texel;\n"+
"uniform sampler2D texture;\n"+
"const float LOG2 = 1.442695;\n"+
"void main() {\n"+
" float z = gl_FragCoord.z / gl_FragCoord.w;\n"+
" float fogFactor = exp2(-fogDensity*fogDensity*z*z*LOG2);\n"+
" fogFactor = clamp(fogFactor,0.0,1.0);\n"+
" vec4 fragColour = texture2D(texture,texel) * colour;\n"+
" gl_FragColor = mix(fogColour,fragColour,fogFactor);\n"+
"}\n",
["mvMatrix","pMatrix","colour","fogDensity","fogColour","texture"],
["vertex","texCoord"]),
plane: [[0,0,0],[0,1,0]],
init: function(w,h) {
this.w = w;
this.h = h;
var vertices = [
w,0,0, 1,0,
0,0,0, 0,0,
w,0,h, 1,1,
w,0,h, 1,1,
0,0,0, 0,0,
0,0,h, 0,1,
];
gl.bindBuffer(gl.ARRAY_BUFFER,this.vbo);
gl.bufferData(gl.ARRAY_BUFFER,new Float32Array(vertices),gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER,null);
this._mapArrayBuffer = new ArrayBuffer(w*h*4);
this._mapByteBuffer = new Uint8Array(this._mapArrayBuffer);
this.map = new Uint32Array(this._mapArrayBuffer);
for(var i=0; i<w*h; i++) // test data: red and green pixels
this.map[i] = Math.random()>0.5? 0xff000080: 0xff008000;
createTexture(this.tex,w,h,this._mapByteBuffer,true);
},
draw: function() {
var program = this.program;
gl.useProgram(program);
gl.uniformMatrix4fv(program.pMatrix,false,camera.pMatrix);
gl.uniformMatrix4fv(program.mvMatrix,false,camera.mvMatrix);
gl.uniform4f(program.colour,1,1,1,1);
gl.uniform4f(program.fogColour,1,1,1,1);
gl.uniform1f(program.fogDensity,0.03);
gl.bindTexture(gl.TEXTURE_2D,this.tex);
gl.texParameteri(gl.TEXTURE_2D,gl.TEXTURE_MIN_FILTER,gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D,gl.TEXTURE_MAG_FILTER,gl.NEAREST);
gl.bindBuffer(gl.ARRAY_BUFFER,this.vbo);
gl.enableVertexAttribArray(program.vertex);
gl.vertexAttribPointer(program.vertex,3,gl.FLOAT,false,5*4,0);
gl.enableVertexAttribArray(program.texCoord);
gl.vertexAttribPointer(program.texCoord,2,gl.FLOAT,false,5*4,3*4);
gl.drawArrays(gl.TRIANGLES,0,6);
gl.disableVertexAttribArray(program.texCoord);
gl.disableVertexAttribArray(program.vertex);
gl.bindBuffer(gl.ARRAY_BUFFER,null);
gl.bindTexture(gl.TEXTURE_2D,null);
gl.useProgram(null);
},
};
function createTexture(tex,width,height,data,noMipMap) {
tex = tex || gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D,tex);
tex.width = width || data.width;
tex.height = height || data.height;
if(width != null)
gl.texImage2D(gl.TEXTURE_2D,0,gl.RGBA,width,height,0,gl.RGBA,gl.UNSIGNED_BYTE,data || null);
else
gl.texImage2D(gl.TEXTURE_2D,0,gl.RGBA,gl.RGBA,gl.UNSIGNED_BYTE,data);
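// ('anisotropy' and 'anisotropic' are globals assumed to come from gl.getExtension('EXT_texture_filter_anisotropic') elsewhere in this codebase)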
if(anisotropy)
gl.texParameterf(gl.TEXTURE_2D,anisotropic.TEXTURE_MAX_ANISOTROPY_EXT,anisotropy);
gl.texParameteri(gl.TEXTURE_2D,gl.TEXTURE_WRAP_S,gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D,gl.TEXTURE_WRAP_T,gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D,gl.TEXTURE_MAG_FILTER,gl.LINEAR);
if(!noMipMap && !(tex.width&(tex.width-1)) && !(tex.height&(tex.height-1))) { //pow2
gl.texParameteri(gl.TEXTURE_2D,gl.TEXTURE_MIN_FILTER,gl.LINEAR_MIPMAP_LINEAR);
gl.generateMipmap(gl.TEXTURE_2D);
} else
gl.texParameteri(gl.TEXTURE_2D,gl.TEXTURE_MIN_FILTER,gl.LINEAR);
gl.bindTexture(gl.TEXTURE_2D,null);
_textures.push(tex);
return tex;
}
[SOLVED] The problem was that I was enabling anisotropy for the texture. Anisotropy trumps gl.NEAREST, it seems. If you want to use gl.NEAREST on a texture, it's important not to also have anisotropy set.
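Given that, a small guard in createTexture avoids the conflict, e.g. (a sketch reusing the snippet's own anisotropy globals and its noMipMap flag, which the pixelated map texture already passes as true):
if (anisotropy && !noMipMap) // skip anisotropy for textures meant to be drawn pixelated
gl.texParameterf(gl.TEXTURE_2D, anisotropic.TEXTURE_MAX_ANISOTROPY_EXT, anisotropy);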
