Implementing a gradient shader in three.js

I am trying to learn about shaders using three.js. What I am trying to do is create a shader that generates gradients to texture planets with. Right now I am just trying to generate one gradient to make sure it works. However, when I apply the shader it only renders one of the colors, and does not create the gradient effect I'm looking for. I can't seem to find where I'm going wrong with my code.
I'm using the Book of Shaders as the basis for my code. Specifically, I was looking at this example, trying to replicate the background color.
Here is my shader code:
<section id="fragmentshader">
#ifdef GL_ES
precision mediump float;
#endif
// #define PI 3.14159265359
uniform vec2 u_resolution;
// uniform vec2 u_mouse;
// uniform float u_time;
vec3 colorA = vec3(0.500,0.141,0.912);
vec3 colorB = vec3(1.000,0.833,0.224);
void main() {
vec2 st = gl_FragCoord.xy/u_resolution.xy;
vec3 color = vec3(0.0);
color = mix( colorA,
colorB,
st.y);
gl_FragColor = vec4(color,1.0);
}
</section>
<section id="vertexshader">
void main() {
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</section>
and my three.js code inside an A-Frame component:
var uniforms = {
u_resolution: { type: "v2", value: new THREE.Vector2() },
};
var fShader = $('#fragmentshader');
var vShader = $('#vertexshader');
var geometry = new THREE.SphereGeometry(getRandomInt(100, 250), 20, 20);
// var material = new THREE.MeshBasicMaterial( {wireframe: true });
var material = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: vShader.text(),
fragmentShader: fShader.text()
});
var sphere = new THREE.Mesh(geometry, material);
This is what my spheres currently look like: a single flat color with no visible gradient.
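A likely cause: u_resolution is created as an empty THREE.Vector2() and never given the canvas size, so the fragment shader divides gl_FragCoord.xy by zero and the mix factor collapses to a single extreme, i.e. one flat color. A minimal sketch of the missing step, assuming you keep the resolution-based shader and can reach the renderer from the component (the renderer name here is a placeholder for however your setup exposes it):
// hypothetical sketch: feed the actual drawing-buffer size into the uniform
// "renderer" stands for however you obtain the WebGLRenderer inside the A-Frame component
uniforms.u_resolution.value.set(renderer.domElement.width, renderer.domElement.height);
The snippet that follows sidesteps the resolution uniform entirely and drives the gradient from the mesh's UV coordinates (vUv.y) instead, which also pins the gradient to the object's surface rather than to screen space: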

var camera, scene, renderer, mesh, material;
init();
animate();
function init() {
// Renderer.
renderer = new THREE.WebGLRenderer();
//renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
// Add renderer to page
document.body.appendChild(renderer.domElement);
// Create camera.
camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 1, 1000);
camera.position.z = 400;
// Create scene.
scene = new THREE.Scene();
var uniforms = {
"color1" : {
type : "c",
value : new THREE.Color(0xffffff)
},
"color2" : {
type : "c",
value : new THREE.Color(0x000000)
},
};
var fShader = document.getElementById('fragmentShader').text;
var vShader = document.getElementById('vertexShader').text;
// Create material
var material = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: vShader,
fragmentShader: fShader
});
// Create cube and add to scene.
var geometry = new THREE.BoxGeometry(200, 200, 200);
mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
// Create ambient light and add to scene.
var light = new THREE.AmbientLight(0x404040); // soft white light
scene.add(light);
// Create directional light and add to scene.
var directionalLight = new THREE.DirectionalLight(0xffffff);
directionalLight.position.set(1, 1, 1).normalize();
scene.add(directionalLight);
// Add listener for window resize.
window.addEventListener('resize', onWindowResize, false);
}
function animate() {
requestAnimationFrame(animate);
mesh.rotation.x += 0.005;
mesh.rotation.y += 0.01;
renderer.render(scene, camera);
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
}
<script src="https://rawgit.com/mrdoob/three.js/r86/build/three.min.js"></script>
<script id="vertexShader" type="x-shader/x-vertex">
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
}
</script>
<script id="fragmentShader" type="x-shader/x-fragment">
uniform vec3 color1;
uniform vec3 color2;
varying vec2 vUv;
void main() {
gl_FragColor = vec4(mix(color1, color2, vUv.y),1.0);
}
</script>

Related

Turn a 3D .obj into an SVG with the SVG renderer

Using the WebGLRenderer, I've successfully loaded an .obj file created in Cinema 4D.
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera( 75, window.innerWidth/window.innerHeight, 0.1, 1000 );
camera.position.z = 200;
var renderer = new THREE.WebGLRenderer();
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
var controls = new THREE.OrbitControls(camera, renderer.domElement);
controls.enableDamping = true;
controls.dampingFactor = 0.25;
controls.enableZoom = true;
var keyLight = new THREE.DirectionalLight(new THREE.Color('hsl(30, 100%, 75%)'), 1.0);
keyLight.position.set(-100, 0, 100);
var fillLight = new THREE.DirectionalLight(new THREE.Color('hsl(240, 100%, 75%)'), 0.75);
fillLight.position.set(100, 0, 100);
var backLight = new THREE.DirectionalLight(0xffffff, 1.0);
backLight.position.set(100, 0, -100).normalize();
scene.add(keyLight);
scene.add(fillLight);
scene.add(backLight);
const material = new THREE.LineBasicMaterial( {
color: 0xffffff,
linewidth: 1,
linecap: 'round', //ignored by WebGLRenderer
linejoin: 'round' //ignored by WebGLRenderer
} );
scene.add(material)
var objLoader = new THREE.OBJLoader();
objLoader.setPath('/examples/3d-obj-loader/assets/');
objLoader.load('Untitled2.obj', function (object) {
object.position.y -= 60;
scene.add(object);
});
var animate = function () {
requestAnimationFrame( animate );
controls.update();
renderer.render(scene, camera);
};
animate();
I'm aiming to convert this to vector output using the SVGRenderer. I tried swapping out THREE.WebGLRenderer for SVGRenderer, but there's clearly more to it than that, and I can't find any walkthroughs or breakdowns on here.
I can see that what I'm looking for is technically possible with three.js.
I've linked an image of the 3D file I'm looking to vectorise: a simple mountain-style scene. I'm aiming for every edge to be stroked, with no texture needed (white shapes, black edges).
I tried making a JSON with Bodymovin, but it proved impossible to join all the paths of the vertices without lots of stray lines appearing.

Coloring faces of a Three.js BoxGeometry

I saw some solutions that access the THREE.BoxGeometry faces like this:
var geometry = new THREE.BoxGeometry(n,n,n)
let faces = geometry.faces;
for (let face of faces) {
face.color.set(Math.random() * 0xffffff);
}
But it seems that the faces array doesn't exist in the current three.js version, r129 (Uncaught TypeError: faces is not iterable).
How can I achieve an easy coloring of the six cube faces?
Try it like so:
let camera, scene, renderer, mesh;
init();
animate();
function init() {
camera = new THREE.PerspectiveCamera( 70, window.innerWidth / window.innerHeight, 0.01, 10 );
camera.position.z = 1;
scene = new THREE.Scene();
const geometry = new THREE.BoxGeometry( 0.2, 0.2, 0.2 ).toNonIndexed();
// vertexColors must be true so vertex colors can be used in the shader
const material = new THREE.MeshBasicMaterial( { vertexColors: true } );
// generate color data for each vertex
const positionAttribute = geometry.getAttribute( 'position' );
const colors = [];
const color = new THREE.Color();
for ( let i = 0; i < positionAttribute.count; i += 3 ) {
color.set( Math.random() * 0xffffff );
// define the same color for each vertex of a triangle
colors.push( color.r, color.g, color.b );
colors.push( color.r, color.g, color.b );
colors.push( color.r, color.g, color.b );
}
// define the new attribute
geometry.setAttribute( 'color', new THREE.Float32BufferAttribute( colors, 3 ) );
mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );
renderer = new THREE.WebGLRenderer( { antialias: true } );
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
}
function animate() {
requestAnimationFrame( animate );
mesh.rotation.x += 0.01;
mesh.rotation.y += 0.02;
renderer.render( scene, camera );
}
body {
margin: 0;
}
<script src="https://cdn.jsdelivr.net/npm/three#0.129.0/build/three.js"></script>
The previous geometry format was removed from the core with r125. There is more information in the following post on the three.js forum:
https://discourse.threejs.org/t/three-geometry-will-be-removed-from-core-with-r125/22401
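In other words, from r125 onward BoxGeometry is a BufferGeometry, so there is no faces array to iterate over; per-face data has to go through buffer attributes, which is what the color attribute in the snippet above does. A quick sketch of what the new format exposes instead (counts are for a default, indexed BoxGeometry):
const geometry = new THREE.BoxGeometry(1, 1, 1);
console.log(geometry.faces); // undefined in r125+, hence "faces is not iterable"
console.log(geometry.getAttribute('position').count); // 24 vertices (4 per side)
console.log(geometry.index.count / 3); // 12 triangles (2 per side)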

WebGL creating multiple objects?

So I am trying to create circles using the midpoint algorithm. I'm having trouble understanding how to handle buffers and basically how to get WebGL properly set up. Using the console I can see that the algorithm is working fine and building the vertex array, but I need help understanding what to do with useProgram, createBuffer and drawArrays. Where should I place them?
Also, should I concat the circle points every time I call it in the START() function?
like: circle(blah blah).concat(circle(blah blah));
var vertexShaderText =
[
'precision mediump float;',
'',
'attribute vec2 vertPosition;',
'attribute vec3 vertColor;',
'varying vec3 fragColor;',
'',
'void main()',
'{',
' fragColor = vertColor;',
' gl_Position = vec4(vertPosition, 0.0, 1.0);',
'}'
].join('\n');
var fragmentShaderText =
[
'precision mediump float;',
'',
'varying vec3 fragColor;',
'void main()',
'{',
' gl_FragColor = vec4(fragColor, 1.0);',
'}'
].join('\n');
var START = function () {
console.log('This is working');
var canvas = document.getElementById('sky');
var gl = canvas.getContext('webgl');
if (!gl) {
console.log('WebGL not supported, falling back on experimental-webgl');
gl = canvas.getContext('experimental-webgl');
}
if (!gl) {
alert('Your browser does not support WebGL');
}
gl.clearColor(.3, .3, .7, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Create shaders
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
var fragmentShader =
gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(vertexShader, vertexShaderText);
gl.shaderSource(fragmentShader, fragmentShaderText);
//create a program for the shaders
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
gl.useProgram(program);
var circle = function (xmid, ymid, r) {
var points = [];
var x = 0;
var y = r;
var pk = 5/4 - r;
while (x < y)
{
if (pk < 0)
{
x++;
pk += 2*x + 1;
}
else
{
x++;
y--;
pk += 2 * (x-y) + 1;
}
points.push(x+xmid, y+ymid);
points.push(x+xmid, -y+ymid);
points.push(-x+xmid, y+ymid);
points.push(-x+xmid, -y+ymid);
points.push(y+xmid, x+ymid);
points.push(y+xmid, -x+ymid);
points.push(-y+xmid, x+ymid);
points.push(-y+xmid, -x+ymid);
}
var cbuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, cbuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(points),
gl.STATIC_DRAW);
gl.drawArrays(gl.POINTS, 0, points.length/2);
var positionAttribLocation = gl.getAttribLocation(program,
'vertPosition');
var colorAttribLocation = gl.getAttribLocation(program,
'vertColor');
gl.vertexAttribPointer(
positionAttribLocation, // Attribute location
2, // Number of elements per attribute
gl.FLOAT, // Type of elements
gl.FALSE,
5 * Float32Array.BYTES_PER_ELEMENT, // Size of an individual vertex
0 // Offset from the beginning of a single vertex to this attribute
);
gl.enableVertexAttribArray(positionAttribLocation);
gl.enableVertexAttribArray(colorAttribLocation);
return points;
}
circle(0.6, 0.6, 0.18);
circle(0.9, 0.6, 0.18);
circle(0.5, 0.4, 0.18);
circle(1.0, 0.4, 0.18);
circle(0.75, 0.4, 0.18);
circle(0.75, 0.4, 0.18);
}
START();
<canvas id="sky"></canvas>
This is what my console log is saying:
6 WebGL: INVALID_OPERATION: useProgram: program not valid
6 WebGL: INVALID_OPERATION: drawArrays: no valid shader program in use
12 WebGL: INVALID_OPERATION: getAttribLocation: program not linked
You can clearly see that I am linking and using the program at the very beginning. So what gives?
There's more than one issue with the code:
The shaders are not compiled
After setting the shader source with gl.shaderSource you need to compile each shader with gl.compileShader. You should also check for compile errors by calling gl.getShaderParameter(shader, gl.COMPILE_STATUS), and check for link errors after linking by calling gl.getProgramParameter(program, gl.LINK_STATUS).
gl.drawArrays is called before setting the attributes
The code is enabling 2 attributes but only supplying data for 1 attribute.
The code is drawing gl.POINTS but the vertex shader is not setting gl_PointSize
I also don't really understand your circle code but since I don't know what it's really trying to do I can't fix it.
And finally, you should probably read some tutorials on WebGL.
I'd also suggest using multiline template literals for your shaders:
const vertexShaderText = `
precision mediump float;
attribute vec2 vertPosition;
attribute vec3 vertColor;
varying vec3 fragColor;
void main()
{
fragColor = vertColor;
gl_Position = vec4(vertPosition, 0.0, 1.0);
gl_PointSize = 5.;
}
`;
const fragmentShaderText = `
precision mediump float;
varying vec3 fragColor;
void main()
{
gl_FragColor = vec4(fragColor, 1.0);
}
`;
const start = function () {
console.log('This is working');
const canvas = document.getElementById('sky');
const gl = canvas.getContext('webgl');
if (!gl) {
alert('Your browser does not support WebGL');
return;
}
gl.clearColor(.3, .3, .7, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
//create a shader program
const program = createProgram(gl, vertexShaderText, fragmentShaderText);
gl.useProgram(program);
const circle = function (xmid, ymid, r) {
const points = [];
let x = 0;
let y = r;
let pk = 5/4 - r;
while (x < y)
{
if (pk < 0)
{
x++;
pk += 2*x + 1;
}
else
{
x++;
y--;
pk += 2 * (x-y) + 1;
}
points.push(x+xmid, y+ymid);
points.push(x+xmid, -y+ymid);
points.push(-x+xmid, y+ymid);
points.push(-x+xmid, -y+ymid);
points.push(y+xmid, x+ymid);
points.push(y+xmid, -x+ymid);
points.push(-y+xmid, x+ymid);
points.push(-y+xmid, -x+ymid);
}
const cbuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, cbuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(points), gl.STATIC_DRAW);
const positionAttribLocation = gl.getAttribLocation(program, 'vertPosition');
const colorAttribLocation = gl.getAttribLocation(program, 'vertColor');
gl.vertexAttribPointer(
positionAttribLocation, // Attribute location
2, // Number of elements per attribute
gl.FLOAT, // Type of elements
gl.FALSE,
0, // Size of an individual vertex
0 // Offset from the beginning of a single vertex to this attribute
);
gl.enableVertexAttribArray(positionAttribLocation);
// you probably meant to supply colors for this attribute
// since if you wanted a constant color you'd have probably
// used a uniform but since you didn't we'll set a constant
// color
gl.vertexAttrib4f(colorAttribLocation, 1, 0, 0, 1);
gl.drawArrays(gl.POINTS, 0, points.length/2);
return points;
}
circle(0.6, 0.6, 0.18);
circle(0.9, 0.6, 0.18);
circle(0.5, 0.4, 0.18);
circle(1.0, 0.4, 0.18);
circle(0.75, 0.4, 0.18);
circle(0.75, 0.4, 0.18);
}
function createProgram(gl, vertexShaderText, fragmentShaderText) {
// Create shaders
const vertexShader = createShader(gl, gl.VERTEX_SHADER, vertexShaderText);
const fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragmentShaderText);
const program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
console.error(gl.getProgramInfoLog(program));
gl.deleteProgram(program);
return null;
}
return program;
}
function createShader(gl, type, source) {
const shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
console.error(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return null;
}
return shader;
}
start();
<canvas id="sky"></canvas>

Three.js scene does not render in Safari 11.0.2

I'm trying to determine why a Three.js scene does not render in Safari 11.0.2 (OSX 10.12.6).
/**
* Generate a scene object with a background color
**/
function getScene() {
var scene = new THREE.Scene();
scene.background = new THREE.Color(0x111111);
return scene;
}
/**
* Generate the camera to be used in the scene. Camera args:
* [0] field of view: identifies the portion of the scene
* visible at any time (in degrees)
* [1] aspect ratio: identifies the aspect ratio of the
* scene in width/height
* [2] near clipping plane: objects closer than the near
* clipping plane are culled from the scene
* [3] far clipping plane: objects farther than the far
* clipping plane are culled from the scene
**/
function getCamera() {
var aspectRatio = window.innerWidth / window.innerHeight;
var camera = new THREE.PerspectiveCamera(75, aspectRatio, 0.1, 10000);
camera.position.set(0,150,400);
camera.lookAt(scene.position);
return camera;
}
/**
* Generate the light to be used in the scene. Light args:
* [0]: Hexadecimal color of the light
* [1]: Numeric value of the light's strength/intensity
* [2]: The distance from the light where the intensity is 0
* @param {obj} scene: the current scene object
**/
function getLight(scene) {
var lights = [];
lights[0] = new THREE.PointLight( 0xffffff, 0.6, 0 );
lights[0].position.set( 100, 200, 100 );
scene.add( lights[0] );
var ambientLight = new THREE.AmbientLight(0x111111);
scene.add(ambientLight);
return light;
}
/**
* Generate the renderer to be used in the scene
**/
function getRenderer() {
// Create the canvas with a renderer
var renderer = new THREE.WebGLRenderer({antialias: true});
// Add support for retina displays
renderer.setPixelRatio(window.devicePixelRatio);
// Specify the size of the canvas
renderer.setSize(window.innerWidth, window.innerHeight);
// Add the canvas to the DOM
document.body.appendChild(renderer.domElement);
return renderer;
}
/**
* Generate the controls to be used in the scene
* @param {obj} camera: the three.js camera for the scene
* @param {obj} renderer: the three.js renderer for the scene
**/
function getControls(camera, renderer) {
var controls = new THREE.TrackballControls(camera, renderer.domElement);
controls.zoomSpeed = 0.4;
controls.panSpeed = 0.4;
return controls;
}
/**
* Get grass
**/
function getPlane(scene, loader) {
var texture = loader.load('grass.jpg');
texture.wrapS = texture.wrapT = THREE.RepeatWrapping;
texture.repeat.set( 10, 10 );
var material = new THREE.MeshBasicMaterial({
map: texture, side: THREE.DoubleSide
});
var geometry = new THREE.PlaneGeometry(1000, 1000, 10, 10);
var plane = new THREE.Mesh(geometry, material);
plane.position.y = -0.5;
plane.rotation.x = Math.PI / 2;
scene.add(plane);
return plane;
}
/**
* Add background
**/
function getBackground(scene, loader) {
var imagePrefix = '';
var directions = ['right', 'left', 'top', 'bottom', 'front', 'back'];
var imageSuffix = '.bmp';
var geometry = new THREE.BoxGeometry( 1000, 1000, 1000 );
var materialArray = [];
for (var i = 0; i < 6; i++)
materialArray.push( new THREE.MeshBasicMaterial({
map: loader.load(imagePrefix + directions[i] + imageSuffix),
side: THREE.BackSide
}));
var sky = new THREE.Mesh( geometry, materialArray );
scene.add(sky);
}
/**
* Add a character
**/
function getSphere(scene) {
var geometry = new THREE.SphereGeometry( 30, 12, 9 );
var material = new THREE.MeshPhongMaterial({
color: 0xd0901d,
emissive: 0xaf752a,
side: THREE.DoubleSide,
flatShading: true
});
var sphere = new THREE.Mesh( geometry, material );
// create a group for translations and rotations
var sphereGroup = new THREE.Group();
sphereGroup.add(sphere)
sphereGroup.position.set(0, 24, 100);
scene.add(sphereGroup);
return [sphere, sphereGroup];
}
/**
* Store all currently pressed keys
**/
function addListeners() {
window.addEventListener('keydown', function(e) {
pressed[e.key.toUpperCase()] = true;
})
window.addEventListener('keyup', function(e) {
pressed[e.key.toUpperCase()] = false;
})
}
/**
* Update the sphere's position
**/
function moveSphere() {
var delta = clock.getDelta(); // seconds
var moveDistance = 200 * delta; // 200 pixels per second
var rotateAngle = Math.PI / 2 * delta; // pi/2 radians (90 deg) per sec
// move forwards/backwards/left/right
if ( pressed['W'] ) {
sphere.rotateOnAxis(new THREE.Vector3(1,0,0), -rotateAngle)
sphereGroup.translateZ( -moveDistance );
}
if ( pressed['S'] )
sphereGroup.translateZ( moveDistance );
if ( pressed['Q'] )
sphereGroup.translateX( -moveDistance );
if ( pressed['E'] )
sphereGroup.translateX( moveDistance );
// rotate left/right/up/down
var rotation_matrix = new THREE.Matrix4().identity();
if ( pressed['A'] )
sphereGroup.rotateOnAxis(new THREE.Vector3(0,1,0), rotateAngle);
if ( pressed['D'] )
sphereGroup.rotateOnAxis(new THREE.Vector3(0,1,0), -rotateAngle);
if ( pressed['R'] )
sphereGroup.rotateOnAxis(new THREE.Vector3(1,0,0), rotateAngle);
if ( pressed['F'] )
sphereGroup.rotateOnAxis(new THREE.Vector3(1,0,0), -rotateAngle);
}
/**
* Follow the sphere
**/
function moveCamera() {
var relativeCameraOffset = new THREE.Vector3(0,50,200);
var cameraOffset = relativeCameraOffset.applyMatrix4(sphereGroup.matrixWorld);
camera.position.x = cameraOffset.x;
camera.position.y = cameraOffset.y;
camera.position.z = cameraOffset.z;
camera.lookAt(sphereGroup.position);
}
// Render loop
function render() {
requestAnimationFrame(render);
renderer.render(scene, camera);
moveSphere();
moveCamera();
};
// state
var pressed = {};
var clock = new THREE.Clock();
// globals
var scene = getScene();
var camera = getCamera();
var light = getLight(scene);
var renderer = getRenderer();
// add meshes
var loader = new THREE.TextureLoader();
var floor = getPlane(scene, loader);
var background = getBackground(scene, loader);
var sphereData = getSphere(scene);
var sphere = sphereData[0];
var sphereGroup = sphereData[1];
addListeners();
render();
body { margin: 0; overflow: hidden; }
canvas { width: 100%; height: 100%; }
<script src='https://cdnjs.cloudflare.com/ajax/libs/three.js/88/three.min.js'></script>
<script src='https://threejs.org/examples/js/controls/TrackballControls.js'></script>
More generally, all of the examples at shadertoy.com [example] either do not appear or appear very faintly and almost entirely in white on Safari 11.0.2.
The same holds for the "Safari Technology Preview" even after I turn on all experimental web features, including WebGL 2.0.
I'd like to figure out how to make the scene render, but I'm more interested in learning how others would attempt to debug this kind of problem. Are there tools or resources that can help pinpoint this kind of problem (like developer tools just for WebGL)?
This looks like a compositing bug in Safari. Hopefully Apple will fix it.
There are several workarounds. The easiest seems to be to set the background color of the body or canvas to black.
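For reference, the relevant change relative to the question's snippet is the last CSS rule in the full runnable version below, which gives the canvas an opaque background:
canvas { width: 100%; height: 100%; background: black; }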
/**
* Generate a scene object with a background color
**/
function getScene() {
var scene = new THREE.Scene();
scene.background = new THREE.Color(0x111111);
return scene;
}
/**
* Generate the camera to be used in the scene. Camera args:
* [0] field of view: identifies the portion of the scene
* visible at any time (in degrees)
* [1] aspect ratio: identifies the aspect ratio of the
* scene in width/height
* [2] near clipping plane: objects closer than the near
* clipping plane are culled from the scene
* [3] far clipping plane: objects farther than the far
* clipping plane are culled from the scene
**/
function getCamera() {
var aspectRatio = window.innerWidth / window.innerHeight;
var camera = new THREE.PerspectiveCamera(75, aspectRatio, 0.1, 10000);
camera.position.set(0,150,400);
camera.lookAt(scene.position);
return camera;
}
/**
* Generate the light to be used in the scene. Light args:
* [0]: Hexadecimal color of the light
* [1]: Numeric value of the light's strength/intensity
* [2]: The distance from the light where the intensity is 0
* @param {obj} scene: the current scene object
**/
function getLight(scene) {
var lights = [];
lights[0] = new THREE.PointLight( 0xffffff, 0.6, 0 );
lights[0].position.set( 100, 200, 100 );
scene.add( lights[0] );
var ambientLight = new THREE.AmbientLight(0x111111);
scene.add(ambientLight);
return light;
}
/**
* Generate the renderer to be used in the scene
**/
function getRenderer() {
// Create the canvas with a renderer
var renderer = new THREE.WebGLRenderer({antialias: true});
// Add support for retina displays
renderer.setPixelRatio(window.devicePixelRatio);
// Specify the size of the canvas
renderer.setSize(window.innerWidth, window.innerHeight);
// Add the canvas to the DOM
document.body.appendChild(renderer.domElement);
return renderer;
}
/**
* Generate the controls to be used in the scene
* @param {obj} camera: the three.js camera for the scene
* @param {obj} renderer: the three.js renderer for the scene
**/
function getControls(camera, renderer) {
var controls = new THREE.TrackballControls(camera, renderer.domElement);
controls.zoomSpeed = 0.4;
controls.panSpeed = 0.4;
return controls;
}
/**
* Get grass
**/
function getPlane(scene, loader) {
var texture = loader.load('grass.jpg');
texture.wrapS = texture.wrapT = THREE.RepeatWrapping;
texture.repeat.set( 10, 10 );
var material = new THREE.MeshBasicMaterial({
map: texture, side: THREE.DoubleSide
});
var geometry = new THREE.PlaneGeometry(1000, 1000, 10, 10);
var plane = new THREE.Mesh(geometry, material);
plane.position.y = -0.5;
plane.rotation.x = Math.PI / 2;
scene.add(plane);
return plane;
}
/**
* Add background
**/
function getBackground(scene, loader) {
var imagePrefix = '';
var directions = ['right', 'left', 'top', 'bottom', 'front', 'back'];
var imageSuffix = '.bmp';
var geometry = new THREE.BoxGeometry( 1000, 1000, 1000 );
var materialArray = [];
for (var i = 0; i < 6; i++)
materialArray.push( new THREE.MeshBasicMaterial({
map: loader.load(imagePrefix + directions[i] + imageSuffix),
side: THREE.BackSide
}));
var sky = new THREE.Mesh( geometry, materialArray );
scene.add(sky);
}
/**
* Add a character
**/
function getSphere(scene) {
var geometry = new THREE.SphereGeometry( 30, 12, 9 );
var material = new THREE.MeshPhongMaterial({
color: 0xd0901d,
emissive: 0xaf752a,
side: THREE.DoubleSide,
flatShading: true
});
var sphere = new THREE.Mesh( geometry, material );
// create a group for translations and rotations
var sphereGroup = new THREE.Group();
sphereGroup.add(sphere)
sphereGroup.position.set(0, 24, 100);
scene.add(sphereGroup);
return [sphere, sphereGroup];
}
/**
* Store all currently pressed keys
**/
function addListeners() {
window.addEventListener('keydown', function(e) {
pressed[e.key.toUpperCase()] = true;
})
window.addEventListener('keyup', function(e) {
pressed[e.key.toUpperCase()] = false;
})
}
/**
* Update the sphere's position
**/
function moveSphere() {
var delta = clock.getDelta(); // seconds
var moveDistance = 200 * delta; // 200 pixels per second
var rotateAngle = Math.PI / 2 * delta; // pi/2 radians (90 deg) per sec
// move forwards/backwards/left/right
if ( pressed['W'] ) {
sphere.rotateOnAxis(new THREE.Vector3(1,0,0), -rotateAngle)
sphereGroup.translateZ( -moveDistance );
}
if ( pressed['S'] )
sphereGroup.translateZ( moveDistance );
if ( pressed['Q'] )
sphereGroup.translateX( -moveDistance );
if ( pressed['E'] )
sphereGroup.translateX( moveDistance );
// rotate left/right/up/down
var rotation_matrix = new THREE.Matrix4().identity();
if ( pressed['A'] )
sphereGroup.rotateOnAxis(new THREE.Vector3(0,1,0), rotateAngle);
if ( pressed['D'] )
sphereGroup.rotateOnAxis(new THREE.Vector3(0,1,0), -rotateAngle);
if ( pressed['R'] )
sphereGroup.rotateOnAxis(new THREE.Vector3(1,0,0), rotateAngle);
if ( pressed['F'] )
sphereGroup.rotateOnAxis(new THREE.Vector3(1,0,0), -rotateAngle);
}
/**
* Follow the sphere
**/
function moveCamera() {
var relativeCameraOffset = new THREE.Vector3(0,50,200);
var cameraOffset = relativeCameraOffset.applyMatrix4(sphereGroup.matrixWorld);
camera.position.x = cameraOffset.x;
camera.position.y = cameraOffset.y;
camera.position.z = cameraOffset.z;
camera.lookAt(sphereGroup.position);
}
// Render loop
function render() {
requestAnimationFrame(render);
renderer.render(scene, camera);
moveSphere();
moveCamera();
};
// state
var pressed = {};
var clock = new THREE.Clock();
// globals
var scene = getScene();
var camera = getCamera();
var light = getLight(scene);
var renderer = getRenderer();
// add meshes
var loader = new THREE.TextureLoader();
var floor = getPlane(scene, loader);
var background = getBackground(scene, loader);
var sphereData = getSphere(scene);
var sphere = sphereData[0];
var sphereGroup = sphereData[1];
addListeners();
render();
body { margin: 0; overflow: hidden; }
canvas { width: 100%; height: 100%; background: black; }
<script src='https://cdnjs.cloudflare.com/ajax/libs/three.js/88/three.min.js'></script>
<script src='https://threejs.org/examples/js/controls/TrackballControls.js'></script>
As for how to know how to find these bugs, in this particular case I don't know how I knew except experience. It's unfortunately common for browsers to get compositing bugs with WebGL because it's very hard to test. Most browsers test on servers without GPUs, which means they don't test WebGL enough; they built their testing systems before compositing was GPU accelerated. Another reason is that compositing is browser specific, so the WebGL tests can't include a test for it. Each browser vendor has to implement their own tests for it, and often their testing systems run the browsers in non-release modes, or the APIs that might make testing possible don't actually go through the same code path as the code that draws to the screen.
For WebGL you should generally get the same results across browsers, and compositing issues are the most common place browsers get it wrong, especially when not using the defaults. So first I checked whether the context was set up with non-default attributes, such as alpha: false or premultipliedAlpha: false. To do that I just opened Chrome's dev tools and selected the snippet's context.
Once I had the correct debugger context I just got the WebGL context from the first canvas.
I saw alpha: false, which is not the default, so that was the first clue. If there had been more than one canvas I would have had to use querySelectorAll and try each canvas until I found the WebGL one.
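A minimal sketch of that console check, assuming the page has a single canvas (run it in the dev-tools console of the page in question):
// getContext() returns the context three.js already created, so this only reads the existing attributes
const gl = document.querySelector('canvas').getContext('webgl');
console.log(gl.getContextAttributes()); // look at alpha, premultipliedAlpha, etc.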
Then I also saw that your CSS is different from how I would write it. I would have used:
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
There's no need for overflow: hidden, and it clearly states what I want. I have strong opinions that the way most three.js apps size the canvas is an anti-pattern.
I saw that your CSS sets the canvas height to 100%, but you didn't set the body height, so if nothing else were done your canvas would have zero height. So I set the background color of the canvas to see how big it actually was; I was assuming it was effectively zero. That's when I saw that (a) it was rendering, and setting the background color made it appear, and (b) your canvas has a size only because three.js is setting the canvas size based on window.innerHeight and also overriding your CSS.

.svg asset in three.js 3d space

I'm trying to load a .svg asset into my three.js scene, as a flat vector layer; I found this example with SVGLoader and SVGRenderer from another post, but I can't make it work.
The loaded SVG is stuck in 2D space, doesn't respond to camera movement, and I can't access its position.
I tried switching to the WebGLRenderer, but then the SVG doesn't get loaded at all.
Loading it as a sprite would be an option, but I'd want the sprite to not face the camera and to stay still in 3D space.
var svgManager = new THREE.SVGLoader();
var url = 'https://upload.wikimedia.org/wikipedia/commons/f/f7/Europe_laea_location_map.svg';
function svg_loading_done_callback(doc) {
init();
svg(new THREE.SVGObject(doc));
ico();
animate();
};
svgManager.load(url,
svg_loading_done_callback,
function() {
console.log("Loading SVG...");
},
function() {
console.log("Error loading SVG!");
});
var AMOUNT = 100;
var container, camera, scene, renderer;
function init() {
scene = new THREE.Scene();
renderer = new THREE.SVGRenderer();
renderer.setClearColor(0x00ff00);
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
var container = document.getElementById('container');
container.appendChild(renderer.domElement);
camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 1, 10000);
camera.position.z = 1100;
controls = new THREE.OrbitControls(camera, renderer.domElement);
controls.enableZoom = true;
window.addEventListener('resize', onWindowResize, false);
}
function svg(svgObject) {
svgObject.position.x = 510;
svgObject.position.y = -110;
svgObject.position.z = 0;
scene.add(svgObject);
}
function ico() {
geometry = new THREE.IcosahedronGeometry(100, 1)
material = new THREE.MeshNormalMaterial({});
ico = new THREE.Mesh(geometry, material);
ico.position.y = -300;
scene.add(ico);
ico2 = new THREE.Mesh(geometry, material);
ico2.position.y = 500;
ico2.position.x = -500;
ico2.position.z = -50;
scene.add(ico2);
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
}
function animate() {
requestAnimationFrame(animate);
controls.update;
render();
}
function render() {
renderer.render(scene, camera);
}
body {
margin: 0;
}
canvas {
width: 100%;
height: 100%
}
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<div id="container"></div>
<script src="https://threejs.org/build/three.min.js"></script>
<script src="https://threejs.org/examples/js/renderers/SVGRenderer.js"></script>
<script src="https://threejs.org/examples/js/renderers/Projector.js"></script>
<script src="https://threejs.org/examples/js/loaders/SVGLoader.js"></script>
<script src="https://threejs.org/examples/js/libs/stats.min.js"></script>
<script src="https://threejs.org/examples/js/controls/OrbitControls.js"></script>
The SVGLoader and SVGRenderer are two different things. The first loads an SVG file and converts it to three.js shapes (albeit with some limitations, e.g. it can only read fairly simple SVGs and only renders filled objects, not strokes), while the latter renders three.js primitives using SVG elements instead of WebGL. In a sense, they are opposites of each other.
So, first of all, you'd need to use the WebGLRenderer for your case.
Then, you need to change the SVG loading callback. It receives an array of paths with which you can render the SVG.
See the changes in functions svg_loading_done_callback, init and svg, and run it in JSFiddle:
var svgManager = new THREE.SVGLoader();
var url = 'https://upload.wikimedia.org/wikipedia/commons/f/f7/Europe_laea_location_map.svg';
function svg_loading_done_callback(paths) {
init();
svg(paths);
ico();
animate();
};
svgManager.load(url,
svg_loading_done_callback,
function() {
console.log("Loading SVG...");
},
function() {
console.log("Error loading SVG!");
});
var AMOUNT = 100;
var container, camera, scene, renderer;
function init() {
scene = new THREE.Scene();
renderer = new THREE.WebGLRenderer();
renderer.setClearColor(0x00ff00);
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
var container = document.getElementById('container');
container.appendChild(renderer.domElement);
camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 1, 10000);
camera.position.z = 1100;
controls = new THREE.OrbitControls(camera, renderer.domElement);
controls.enableZoom = true;
window.addEventListener('resize', onWindowResize, false);
}
function svg(paths) {
var group = new THREE.Group();
group.position.x = 510;
group.position.y = -110;
group.position.z = 0;
for ( var i = 0; i < paths.length; i ++ ) {
var path = paths[ i ];
var material = new THREE.MeshBasicMaterial( {
color: path.color,
side: THREE.DoubleSide,
depthWrite: false
} );
var shapes = path.toShapes( true );
for ( var j = 0; j < shapes.length; j ++ ) {
var shape = shapes[ j ];
var geometry = new THREE.ShapeBufferGeometry( shape );
var mesh = new THREE.Mesh( geometry, material );
group.add( mesh );
}
}
scene.add( group );
}
function ico() {
geometry = new THREE.IcosahedronGeometry(100, 1)
material = new THREE.MeshNormalMaterial({});
ico = new THREE.Mesh(geometry, material);
ico.position.y = -300;
scene.add(ico);
ico2 = new THREE.Mesh(geometry, material);
ico2.position.y = 500;
ico2.position.x = -500;
ico2.position.z = -50;
scene.add(ico2);
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
}
function animate() {
requestAnimationFrame(animate);
controls.update;
render();
}
function render() {
renderer.render(scene, camera);
}
PS: Check the SVG Loader to see what it's able to parse
