Modifying the three.js sample webgl2_volume_perlin

The official example webgl2_volume_perlin.html renders Perlin noise as a volume, so you can get a direct feel for the shape of a Perlin-noise field. The original example uses a cube; I changed the cube to a sphere.

<!DOCTYPE html>
<html lang="en">
	<head>
		<title>three.js webgl2 - volume</title>
		<meta charset="utf-8">
		<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
		<link type="text/css" rel="stylesheet" href="main.css">
	</head>

	<body>
		<div id="info">
			<a href="https://threejs.org" target="_blank" rel="noopener">three.js</a> webgl2 - volume
		</div>

		<!-- Import maps polyfill -->
		<!-- Remove this when import maps are widely supported -->
		<script async src="https://unpkg.com/[email protected]/dist/es-module-shims.js"></script>

		<script type="importmap">
			{
				"imports": {
					"three": "../build/three.module.js",
					"three/addons/": "./jsm/"
				}
			}
		</script>

		<script type="module">
			import * as THREE from 'three';
			import { OrbitControls } from 'three/addons/controls/OrbitControls.js';
			import { ImprovedNoise } from 'three/addons/math/ImprovedNoise.js';

			import { GUI } from 'three/addons/libs/lil-gui.module.min.js';
			import WebGL from 'three/addons/capabilities/WebGL.js';

			if ( WebGL.isWebGL2Available() === false ) {

				document.body.appendChild( WebGL.getWebGL2ErrorMessage() );

			}

			let renderer, scene, camera;
			let mesh;

			init();
			animate();

			function init() {

				renderer = new THREE.WebGLRenderer();
				renderer.setPixelRatio( window.devicePixelRatio );
				renderer.setSize( window.innerWidth, window.innerHeight );
				document.body.appendChild( renderer.domElement );

				scene = new THREE.Scene();
				scene.add( new THREE.AxesHelper( 2000 ) );

				camera = new THREE.PerspectiveCamera( 60, window.innerWidth / window.innerHeight, 0.1, 100 );
				camera.position.set( 0, 0, 2 );

				new OrbitControls( camera, renderer.domElement );

				// Texture

				const size = 128;
				const data = new Uint8Array( size * size * size );

				let i = 0;
				const perlin = new ImprovedNoise();
				const vector = new THREE.Vector3();

				for ( let z = 0; z < size; z ++ ) {

					for ( let y = 0; y < size; y ++ ) {

						for ( let x = 0; x < size; x ++ ) {

							vector.set( x, y, z ).divideScalar( size );

							const d = perlin.noise( vector.x * 6.5, vector.y * 6.5, vector.z * 6.5 );

							data[ i ++ ] = d * 128 + 128; // map noise (≈ [-1, 1]) into a byte value

						}

					}

				}

				const texture = new THREE.Data3DTexture( data, size, size, size );
				texture.format = THREE.RedFormat;
				texture.minFilter = THREE.LinearFilter;
				texture.magFilter = THREE.LinearFilter;
				texture.unpackAlignment = 1;
				texture.needsUpdate = true;

				// Material

				const vertexShader = /* glsl */`
					in vec3 position;

					uniform mat4 modelMatrix;
					uniform mat4 modelViewMatrix;
					uniform mat4 projectionMatrix;
					uniform vec3 cameraPos;

					out vec3 vOrigin;
					out vec3 vDirection;

					void main() {
						vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );

						vOrigin = vec3( inverse( modelMatrix ) * vec4( cameraPos, 1.0 ) ).xyz;
						vDirection = position - cameraPos;

						gl_Position = projectionMatrix * mvPosition;
					}
				`;

				const fragmentShader = /* glsl */`
					precision highp float;
					precision highp sampler3D;

					uniform mat4 modelViewMatrix;
					uniform mat4 projectionMatrix;
					uniform vec3 cameraPos;

					in vec3 vOrigin;
					in vec3 vDirection;

					out vec4 color;

					uniform sampler3D map;

					uniform float threshold;
					uniform float steps;

					vec2 hitSphere(vec3 orig, vec3 dir) {
						const vec3 sphereCenter = vec3(0.0);
						const float sphereRadius = 0.5;
						vec3 toSphere = sphereCenter - orig;
						float tca = dot(toSphere, dir);
						float d2 = dot(toSphere, toSphere) - tca * tca;
						float radius2 = sphereRadius * sphereRadius;
						if (d2 > radius2) {
							return vec2(1.0e6, 1.0e6);
						}
						float thc = sqrt(radius2 - d2);
						// t0 = first intersect point - entrance on front of sphere
						float t0 = tca - thc;
						// t1 = second intersect point - exit point on back of sphere.
						float t1 = tca + thc;
						if (t1 < 0.0) {
							// t1 is behind the ray
							return vec2(1.0e6, 1.0e6);
						}
						if (t0 < 0.0) {
							// t0 is behind the ray origin, so the ray starts inside the sphere.
							// Return (t0, t1) unchanged; main() clamps the near distance to 0.0, just as it does for hitBox.
							return vec2(t0, t1);
						}
						return vec2(t0, t1);
					}

					// Slab method: intersect the ray with the axis-aligned box [-0.5, 0.5]^3 and return (t_near, t_far).
					vec2 hitBox( vec3 orig, vec3 dir ) {
						const vec3 box_min = vec3( - 0.5 );
						const vec3 box_max = vec3( 0.5 );
						vec3 inv_dir = 1.0 / dir;
						vec3 tmin_tmp = ( box_min - orig ) * inv_dir;
						vec3 tmax_tmp = ( box_max - orig ) * inv_dir;
						vec3 tmin = min( tmin_tmp, tmax_tmp );
						vec3 tmax = max( tmin_tmp, tmax_tmp );
						float t0 = max( tmin.x, max( tmin.y, tmin.z ) );
						float t1 = min( tmax.x, min( tmax.y, tmax.z ) );
						return vec2( t0, t1 );
					}

					float sample1( vec3 p ) {
						return texture( map, p ).r;
					}

					#define epsilon .0001

					vec3 normal( vec3 coord ) {
						if ( coord.x < epsilon ) return vec3( 1.0, 0.0, 0.0 );
						if ( coord.y < epsilon ) return vec3( 0.0, 1.0, 0.0 );
						if ( coord.z < epsilon ) return vec3( 0.0, 0.0, 1.0 );
						if ( coord.x > 1.0 - epsilon ) return vec3( - 1.0, 0.0, 0.0 );
						if ( coord.y > 1.0 - epsilon ) return vec3( 0.0, - 1.0, 0.0 );
						if ( coord.z > 1.0 - epsilon ) return vec3( 0.0, 0.0, - 1.0 );

						float step = 0.01;
						float x = sample1( coord + vec3( - step, 0.0, 0.0 ) ) - sample1( coord + vec3( step, 0.0, 0.0 ) );
						float y = sample1( coord + vec3( 0.0, - step, 0.0 ) ) - sample1( coord + vec3( 0.0, step, 0.0 ) );
						float z = sample1( coord + vec3( 0.0, 0.0, - step ) ) - sample1( coord + vec3( 0.0, 0.0, step ) );

						return normalize( vec3( x, y, z ) );
					}

					void main(){

						vec3 rayDir = normalize( vDirection );
						// vec2 bounds = hitBox( cameraPos, rayDir );
						vec2 bounds = hitSphere( cameraPos, rayDir );

						if ( bounds.x > bounds.y ) discard;

						bounds.x = max( bounds.x, 0.0 );

						vec3 p = cameraPos + bounds.x * rayDir;
						vec3 inc = 1.0 / abs( rayDir );
						float delta = min( inc.x, min( inc.y, inc.z ) );
						delta /= steps;
						// color.rgb = normal( p + 0.5 ) * 0.5 + ( p * 1.5 + 0.25 );
						// color.rgb = p;
						// color.a = 1.;

						for ( float t = bounds.x; t < bounds.y; t += delta ) {

							float d = sample1( p + 0.5 );

							if ( d > threshold ) {

								color.rgb = normal( p + 0.5 ) * 0.5 + ( p * 1.5 + 0.25 );
								color.a = 1.;
								break;

							}

							p += rayDir * delta;

						}

						if ( color.a == 0.0 ) discard;

					}
				`;

				// const geometry = new THREE.BoxGeometry( 1, 1, 1 );
				const geometry = new THREE.SphereGeometry( 1 );
				const material = new THREE.RawShaderMaterial( {
					glslVersion: THREE.GLSL3,
					uniforms: {
						map: { value: texture },
						cameraPos: { value: new THREE.Vector3() },
						threshold: { value: 0.6 },
						steps: { value: 200 }
					},
					vertexShader,
					fragmentShader,
					side: THREE.BackSide,
				} );

				mesh = new THREE.Mesh( geometry, material );
				scene.add( mesh );

				//

				const parameters = { threshold: 0.6, steps: 200 };

				function update() {

					material.uniforms.threshold.value = parameters.threshold;
					material.uniforms.steps.value = parameters.steps;

				}

				const gui = new GUI();
				gui.add( parameters, 'threshold', 0, 1, 0.01 ).onChange( update );
				gui.add( parameters, 'steps', 0, 300, 1 ).onChange( update );

				window.addEventListener( 'resize', onWindowResize );

			}

			function onWindowResize() {

				camera.aspect = window.innerWidth / window.innerHeight;
				camera.updateProjectionMatrix();

				renderer.setSize( window.innerWidth, window.innerHeight );

			}

			function animate() {

				requestAnimationFrame( animate );

				mesh.material.uniforms.cameraPos.value.copy( camera.position );

				renderer.render( scene, camera );

			}

		</script>

	</body>
</html>

The hitBox( vec3 orig, vec3 dir ) function in the fragment shader does the same thing as intersectBox( box, target ) in src/math/Ray.js. To make it easier to follow, substitute some concrete values: for example a ray with origin (10, 10, 0) and a direction along the line y = x, at a 45° angle to the x axis, cast against a cube centered at the origin with side length 1.
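
Here is a plain JavaScript sketch of that substitution (one assumption on my part: the direction points from (10, 10, 0) back toward the origin, i.e. the normalized vector (-1, -1, 0)/√2). The per-axis loop is equivalent to the vectorized GLSL slab test:

// CPU-side sketch of the shader's hitBox slab test, for hand-checking values.
// The box is the same unit cube centered at the origin as in the shader.
function hitBox( orig, dir ) {

	const boxMin = [ - 0.5, - 0.5, - 0.5 ];
	const boxMax = [ 0.5, 0.5, 0.5 ];

	let t0 = - Infinity, t1 = Infinity;

	for ( let i = 0; i < 3; i ++ ) {

		const invDir = 1 / dir[ i ]; // becomes ±Infinity when dir[ i ] === 0; min/max still behave
		const tA = ( boxMin[ i ] - orig[ i ] ) * invDir;
		const tB = ( boxMax[ i ] - orig[ i ] ) * invDir;
		t0 = Math.max( t0, Math.min( tA, tB ) );
		t1 = Math.min( t1, Math.max( tA, tB ) );

	}

	return [ t0, t1 ]; // the ray hits the box only if t0 <= t1

}

const s = Math.SQRT1_2; // cos 45°
console.log( hitBox( [ 10, 10, 0 ], [ - s, - s, 0 ] ) );
// ≈ [ 13.435, 14.849 ]: the ray enters the cube at ( 0.5, 0.5, 0 ) and leaves it at ( -0.5, -0.5, 0 )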

One thing to note: if you uncomment these two lines

	// color.rgb = p;
	// color.a = 1.;

and comment out the code after them, running the example gives you a shape colored by position (a colored cube with the original hitBox call, or a colored sphere with hitSphere). Why is the part of the shape lying in the octant around (-1, -1, -1), where x, y and z are all negative, black?
The reason is this line: vec3 p = cameraPos + bounds.x * rayDir;. The point p it produces always lies on the surface of the shape being hit. Near (-1, -1, -1) all three components of p are negative, negative color values are clamped to zero, and so that octant renders as black.
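
To put a number on that, reuse the hitBox() sketch above with an illustrative camera position in the negative octant, looking at the box center (the position is made up purely for this example):

// Camera in the x<0, y<0, z<0 octant, ray aimed at the origin.
const cam = [ - 1.2, - 1.2, - 1.2 ];
const len = Math.hypot( 1.2, 1.2, 1.2 );
const rayDir = [ 1.2 / len, 1.2 / len, 1.2 / len ]; // normalize( origin - cam )

const t0 = hitBox( cam, rayDir )[ 0 ];                 // entry distance
const p = cam.map( ( c, i ) => c + t0 * rayDir[ i ] ); // point on the cube surface
console.log( p ); // ≈ [ -0.5, -0.5, -0.5 ]: all components negative, so color.rgb = p clamps to black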

color.rgb = normal( p + 0.5 ) * 0.5 + ( p * 1.5 + 0.25 );

Why does the code above add 0.5 to p? Because each component of p always lies in the closed interval [-0.5, 0.5]; adding 0.5 maps it into [0, 1], the valid range of 3D texture coordinates for sampling map.
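
To connect that [0, 1] coordinate back to the Uint8Array filled in init(), here is a rough lookup helper (the name sampleNoise is made up, and it is nearest-neighbor only; the real sampler3D with LinearFilter interpolates between neighboring voxels):

// Nearest-neighbor lookup into the size*size*size Uint8Array from init(),
// approximating texture( map, p + 0.5 ).r without the GPU's trilinear filtering.
function sampleNoise( data, size, uvw ) {

	const x = Math.min( size - 1, Math.floor( uvw[ 0 ] * size ) );
	const y = Math.min( size - 1, Math.floor( uvw[ 1 ] * size ) );
	const z = Math.min( size - 1, Math.floor( uvw[ 2 ] * size ) );

	// Same x-fastest, then y, then z layout used when the array was filled.
	return data[ x + y * size + z * size * size ] / 255; // back to [0, 1], like .r in the shader

}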

for ( float t = bounds.x; t < bounds.y; t += delta ) {

	float d = sample1( p + 0.5 );

	if ( d > threshold ) {

		color.rgb = normal( p + 0.5 ) * 0.5 + ( p * 1.5 + 0.25 );
		color.a = 1.;
		break;

	}

	p += rayDir * delta;

}

The loop above looks like a ray marching algorithm, but it is not quite that: every step advances by the same constant distance, so the ray effectively moves forward at a uniform rate, sampling the 3D Perlin noise texture at equally spaced points along the ray, and it stops as soon as a sample exceeds the threshold. The larger the threshold, the sparser the resulting shape.
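
A CPU-side sketch of that structure (plain JavaScript; the function name and the sample callback are placeholders, not part of the example) looks like this:

// Fixed-step march from the entry distance to the exit distance: sample at
// equally spaced points and stop at the first sample above the threshold.
function marchFixedStep( origin, dir, tNear, tFar, delta, sample, threshold ) {

	let p = origin.map( ( c, i ) => c + tNear * dir[ i ] );

	for ( let t = tNear; t < tFar; t += delta ) {

		if ( sample( p ) > threshold ) return t; // hit: the shader shades and breaks here

		p = p.map( ( c, i ) => c + dir[ i ] * delta ); // advance a constant distance along the ray

	}

	return null; // nothing exceeded the threshold: the shader discards the fragment

}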
