main repo

This commit is contained in:
Basilosaurusrex
2025-11-24 18:09:40 +01:00
parent b636ee5e70
commit f027651f9b
34146 changed files with 4436636 additions and 0 deletions

View File

@@ -0,0 +1,69 @@
import { Mesh, MeshBasicMaterial, SphereGeometry, Vector3 } from 'three';
/**
* A ground-projected skybox.
*
* By default the object is centered at the camera, so it is often helpful to set
* `skybox.position.y = height` to put the ground at the origin.
*
* ```js
* const height = 15, radius = 100;
*
* const skybox = new GroundedSkybox( envMap, height, radius );
* skybox.position.y = height;
* scene.add( skybox );
* ```
*
* @augments Mesh
* @three_import import { GroundedSkybox } from 'three/addons/objects/GroundedSkybox.js';
*/
class GroundedSkybox extends Mesh {

	/**
	 * Constructs a new ground-projected skybox.
	 *
	 * @param {Texture} map - The environment map to use.
	 * @param {number} height - How far the camera that took the photo was above the ground.
	 * A larger value will magnify the downward part of the image.
	 * @param {number} radius - The radius of the skybox. Must be large enough to ensure the scene's camera stays inside.
	 * @param {number} [resolution=128] - The geometry resolution of the skybox.
	 */
	constructor( map, height, radius, resolution = 128 ) {

		if ( height <= 0 || radius <= 0 || resolution <= 0 ) {

			throw new Error( 'GroundedSkybox height, radius, and resolution must be positive.' );

		}

		// Inward-facing sphere: the negative z-scale flips the faces.
		const sphere = new SphereGeometry( radius, 2 * resolution, resolution );
		sphere.scale( 1, 1, - 1 );

		const positions = sphere.getAttribute( 'position' );
		const vertex = new Vector3();

		for ( let index = 0; index < positions.count; index ++ ) {

			vertex.fromBufferAttribute( positions, index );

			// Only vertices below the horizon are projected onto the ground.
			if ( vertex.y >= 0 ) continue;

			// Smooth out the transition from flat floor to sphere:
			const bend = - height * 3 / 2;
			const factor = ( vertex.y < bend )
				? - height / vertex.y
				: ( 1 - vertex.y * vertex.y / ( 3 * bend * bend ) );

			vertex.multiplyScalar( factor );
			vertex.toArray( positions.array, 3 * index );

		}

		positions.needsUpdate = true;

		super( sphere, new MeshBasicMaterial( { map, depthWrite: false } ) );

	}

}
export { GroundedSkybox };

489
node_modules/three/examples/jsm/objects/Lensflare.js generated vendored Normal file
View File

@@ -0,0 +1,489 @@
import {
AdditiveBlending,
Box2,
BufferGeometry,
Color,
FramebufferTexture,
InterleavedBuffer,
InterleavedBufferAttribute,
Mesh,
MeshBasicMaterial,
RawShaderMaterial,
UnsignedByteType,
Vector2,
Vector3,
Vector4
} from 'three';
/**
* Creates a simulated lens flare that tracks a light.
*
* Note that this class can only be used with {@link WebGLRenderer}.
* When using {@link WebGPURenderer}, use {@link LensflareMesh}.
*
* ```js
* const light = new THREE.PointLight( 0xffffff, 1.5, 2000 );
*
* const lensflare = new Lensflare();
* lensflare.addElement( new LensflareElement( textureFlare0, 512, 0 ) );
* lensflare.addElement( new LensflareElement( textureFlare1, 512, 0 ) );
* lensflare.addElement( new LensflareElement( textureFlare2, 60, 0.6 ) );
*
* light.add( lensflare );
* ```
*
* @augments Mesh
* @three_import import { Lensflare } from 'three/addons/objects/Lensflare.js';
*/
class Lensflare extends Mesh {
/**
* Constructs a new lensflare.
*/
constructor() {
// The base mesh itself is fully transparent; the visible flares are drawn
// manually from onBeforeRender() via renderer.renderBufferDirect().
super( Lensflare.Geometry, new MeshBasicMaterial( { opacity: 0, transparent: true } ) );
/**
* This flag can be used for type testing.
*
* @type {boolean}
* @readonly
* @default true
*/
this.isLensflare = true;
this.type = 'Lensflare';
/**
* Overwritten to disable view-frustum culling by default.
*
* @type {boolean}
* @default false
*/
this.frustumCulled = false;
/**
* Overwritten to make sure lensflares are rendered last.
*
* @type {number}
* @default Infinity
*/
this.renderOrder = Infinity;
//
const positionScreen = new Vector3();
const positionView = new Vector3();
// textures: 16x16 scratch buffers. tempMap saves the framebuffer region under
// the light source, occlusionMap stores the depth-tested occlusion probe.
const tempMap = new FramebufferTexture( 16, 16 );
const occlusionMap = new FramebufferTexture( 16, 16 );
// Texel type of the current render target; the scratch textures are
// re-created in onBeforeRender() whenever it changes.
let currentType = UnsignedByteType;
// material
const geometry = Lensflare.Geometry;
// material1a renders a depth-tested magenta quad at the light position; how
// much of it survives the depth test is the visibility measure.
const material1a = new RawShaderMaterial( {
uniforms: {
'scale': { value: null },
'screenPosition': { value: null }
},
vertexShader: /* glsl */`
precision highp float;
uniform vec3 screenPosition;
uniform vec2 scale;
attribute vec3 position;
void main() {
gl_Position = vec4( position.xy * scale + screenPosition.xy, screenPosition.z, 1.0 );
}`,
fragmentShader: /* glsl */`
precision highp float;
void main() {
gl_FragColor = vec4( 1.0, 0.0, 1.0, 1.0 );
}`,
depthTest: true,
depthWrite: false,
transparent: false
} );
// material1b draws the saved framebuffer pixels (tempMap) back over the
// probe quad, restoring the original image.
const material1b = new RawShaderMaterial( {
uniforms: {
'map': { value: tempMap },
'scale': { value: null },
'screenPosition': { value: null }
},
vertexShader: /* glsl */`
precision highp float;
uniform vec3 screenPosition;
uniform vec2 scale;
attribute vec3 position;
attribute vec2 uv;
varying vec2 vUV;
void main() {
vUV = uv;
gl_Position = vec4( position.xy * scale + screenPosition.xy, screenPosition.z, 1.0 );
}`,
fragmentShader: /* glsl */`
precision highp float;
uniform sampler2D map;
varying vec2 vUV;
void main() {
gl_FragColor = texture2D( map, vUV );
}`,
depthTest: false,
depthWrite: false,
transparent: false
} );
// the following object is used for occlusionMap generation
const mesh1 = new Mesh( geometry, material1a );
//
const elements = [];
const shader = LensflareElement.Shader;
// material2 renders each flare element, additively blended and modulated by
// the occlusion probe result (see LensflareElement.Shader).
const material2 = new RawShaderMaterial( {
name: shader.name,
uniforms: {
'map': { value: null },
'occlusionMap': { value: occlusionMap },
'color': { value: new Color( 0xffffff ) },
'scale': { value: new Vector2() },
'screenPosition': { value: new Vector3() }
},
vertexShader: shader.vertexShader,
fragmentShader: shader.fragmentShader,
blending: AdditiveBlending,
transparent: true,
depthWrite: false
} );
const mesh2 = new Mesh( geometry, material2 );
/**
* Adds the given lensflare element to this instance.
*
* @param {LensflareElement} element - The element to add.
*/
this.addElement = function ( element ) {
elements.push( element );
};
//
const scale = new Vector2();
const screenPositionPixels = new Vector2();
const validArea = new Box2();
const viewport = new Vector4();
this.onBeforeRender = function ( renderer, scene, camera ) {
renderer.getCurrentViewport( viewport );
// Re-create the scratch textures if the render target's texel type changed.
const renderTarget = renderer.getRenderTarget();
const type = ( renderTarget !== null ) ? renderTarget.texture.type : UnsignedByteType;
if ( currentType !== type ) {
tempMap.dispose();
occlusionMap.dispose();
tempMap.type = occlusionMap.type = type;
currentType = type;
}
const invAspect = viewport.w / viewport.z;
const halfViewportWidth = viewport.z / 2.0;
const halfViewportHeight = viewport.w / 2.0;
// NDC size of the 16-pixel probe quad.
let size = 16 / viewport.w;
scale.set( size * invAspect, size );
// Area in which a full 16x16 pixel copy stays inside the viewport.
validArea.min.set( viewport.x, viewport.y );
validArea.max.set( viewport.x + ( viewport.z - 16 ), viewport.y + ( viewport.w - 16 ) );
// calculate position in screen space
positionView.setFromMatrixPosition( this.matrixWorld );
positionView.applyMatrix4( camera.matrixWorldInverse );
if ( positionView.z > 0 ) return; // lensflare is behind the camera
positionScreen.copy( positionView ).applyMatrix4( camera.projectionMatrix );
// horizontal and vertical coordinate of the lower left corner of the pixels to copy
screenPositionPixels.x = viewport.x + ( positionScreen.x * halfViewportWidth ) + halfViewportWidth - 8;
screenPositionPixels.y = viewport.y + ( positionScreen.y * halfViewportHeight ) + halfViewportHeight - 8;
// screen cull
if ( validArea.containsPoint( screenPositionPixels ) ) {
// save current RGB to temp texture
renderer.copyFramebufferToTexture( tempMap, screenPositionPixels );
// render pink quad
let uniforms = material1a.uniforms;
uniforms[ 'scale' ].value = scale;
uniforms[ 'screenPosition' ].value = positionScreen;
renderer.renderBufferDirect( camera, null, geometry, material1a, mesh1, null );
// copy result to occlusionMap
renderer.copyFramebufferToTexture( occlusionMap, screenPositionPixels );
// restore graphics
uniforms = material1b.uniforms;
uniforms[ 'scale' ].value = scale;
uniforms[ 'screenPosition' ].value = positionScreen;
renderer.renderBufferDirect( camera, null, geometry, material1b, mesh1, null );
// render elements
// Elements are placed along the line from the light through screen center;
// distance 0 is at the light, distance 1 mirrors it across the center.
const vecX = - positionScreen.x * 2;
const vecY = - positionScreen.y * 2;
for ( let i = 0, l = elements.length; i < l; i ++ ) {
const element = elements[ i ];
const uniforms = material2.uniforms;
uniforms[ 'color' ].value.copy( element.color );
uniforms[ 'map' ].value = element.texture;
uniforms[ 'screenPosition' ].value.x = positionScreen.x + vecX * element.distance;
uniforms[ 'screenPosition' ].value.y = positionScreen.y + vecY * element.distance;
size = element.size / viewport.w;
const invAspect = viewport.w / viewport.z;
uniforms[ 'scale' ].value.set( size * invAspect, size );
material2.uniformsNeedUpdate = true;
renderer.renderBufferDirect( camera, null, geometry, material2, mesh2, null );
}
}
};
/**
* Frees the GPU-related resources allocated by this instance. Call this
* method whenever this instance is no longer used in your app.
*/
this.dispose = function () {
material1a.dispose();
material1b.dispose();
material2.dispose();
tempMap.dispose();
occlusionMap.dispose();
for ( let i = 0, l = elements.length; i < l; i ++ ) {
elements[ i ].texture.dispose();
}
};
}
}
/**
* Represents a single flare that can be added to a {@link Lensflare} container.
*
* @three_import import { LensflareElement } from 'three/addons/objects/Lensflare.js';
*/
class LensflareElement {

	/**
	 * Constructs a new lensflare element.
	 *
	 * @param {Texture} texture - The flare's texture.
	 * @param {number} [size=1] - The size in pixels.
	 * @param {number} [distance=0] - The normalized distance (`[0,1]`) from the light source.
	 * A value of `0` means the flare is located at the light source.
	 * @param {Color} [color] - The flare's color.
	 */
	constructor( texture, size = 1, distance = 0, color = new Color( 0xffffff ) ) {

		// The flare's texture.
		this.texture = texture;

		// The size in pixels (default 1).
		this.size = size;

		// Normalized distance ([0,1]) from the light source; 0 = at the light (default 0).
		this.distance = distance;

		// The flare's color (default white).
		this.color = color;

	}

}
// Raw shader used by Lensflare for every flare element. The vertex stage
// samples the 16x16 occlusionMap (generated in Lensflare.onBeforeRender) at
// nine fixed points and derives a per-flare visibility factor; the fragment
// stage modulates the flare texture's alpha by that visibility and tints it.
LensflareElement.Shader = {
name: 'LensflareElementShader',
uniforms: {
'map': { value: null },
'occlusionMap': { value: null },
'color': { value: null },
'scale': { value: null },
'screenPosition': { value: null }
},
vertexShader: /* glsl */`
precision highp float;
uniform vec3 screenPosition;
uniform vec2 scale;
uniform sampler2D occlusionMap;
attribute vec3 position;
attribute vec2 uv;
varying vec2 vUV;
varying float vVisibility;
void main() {
vUV = uv;
vec2 pos = position.xy;
vec4 visibility = texture2D( occlusionMap, vec2( 0.1, 0.1 ) );
visibility += texture2D( occlusionMap, vec2( 0.5, 0.1 ) );
visibility += texture2D( occlusionMap, vec2( 0.9, 0.1 ) );
visibility += texture2D( occlusionMap, vec2( 0.9, 0.5 ) );
visibility += texture2D( occlusionMap, vec2( 0.9, 0.9 ) );
visibility += texture2D( occlusionMap, vec2( 0.5, 0.9 ) );
visibility += texture2D( occlusionMap, vec2( 0.1, 0.9 ) );
visibility += texture2D( occlusionMap, vec2( 0.1, 0.5 ) );
visibility += texture2D( occlusionMap, vec2( 0.5, 0.5 ) );
vVisibility = visibility.r / 9.0;
vVisibility *= 1.0 - visibility.g / 9.0;
vVisibility *= visibility.b / 9.0;
gl_Position = vec4( ( pos * scale + screenPosition.xy ).xy, screenPosition.z, 1.0 );
}`,
fragmentShader: /* glsl */`
precision highp float;
uniform sampler2D map;
uniform vec3 color;
varying vec2 vUV;
varying float vVisibility;
void main() {
vec4 texture = texture2D( map, vUV );
texture.a *= vVisibility;
gl_FragColor = texture;
gl_FragColor.rgb *= color;
}`
};
// Shared unit-quad geometry (two indexed triangles) with interleaved
// position (3 floats) and uv (2 floats) data, reused by all Lensflare draws.
Lensflare.Geometry = ( function () {

	const interleavedBuffer = new InterleavedBuffer( new Float32Array( [
		- 1, - 1, 0, 0, 0,
		1, - 1, 0, 1, 0,
		1, 1, 0, 1, 1,
		- 1, 1, 0, 0, 1
	] ), 5 );

	const quad = new BufferGeometry();
	quad.setIndex( [ 0, 1, 2, 0, 2, 3 ] );
	quad.setAttribute( 'position', new InterleavedBufferAttribute( interleavedBuffer, 3, 0, false ) );
	quad.setAttribute( 'uv', new InterleavedBufferAttribute( interleavedBuffer, 2, 3, false ) );

	return quad;

} )();
export { Lensflare, LensflareElement };

View File

@@ -0,0 +1,376 @@
import {
AdditiveBlending,
Box2,
BufferGeometry,
Color,
FramebufferTexture,
InterleavedBuffer,
InterleavedBufferAttribute,
Mesh,
MeshBasicNodeMaterial,
NodeMaterial,
UnsignedByteType,
Vector2,
Vector3,
Vector4,
Node
} from 'three/webgpu';
import { texture, textureLoad, uv, ivec2, vec2, vec4, positionGeometry, reference, varyingProperty, materialReference, Fn } from 'three/tsl';
/**
* Creates a simulated lens flare that tracks a light.
*
* Note that this class can only be used with {@link WebGPURenderer}.
* When using {@link WebGLRenderer}, use {@link Lensflare}.
*
* ```js
* const light = new THREE.PointLight( 0xffffff, 1.5, 2000 );
*
* const lensflare = new LensflareMesh();
* lensflare.addElement( new LensflareElement( textureFlare0, 512, 0 ) );
* lensflare.addElement( new LensflareElement( textureFlare1, 512, 0 ) );
* lensflare.addElement( new LensflareElement( textureFlare2, 60, 0.6 ) );
*
* light.add( lensflare );
* ```
*
* @augments Mesh
* @three_import import { LensflareMesh } from 'three/addons/objects/LensflareMesh.js';
*/
class LensflareMesh extends Mesh {
/**
* Constructs a new lensflare mesh.
*/
constructor() {
// The base mesh itself is fully transparent; the visible flares are drawn
// manually from onBeforeRender() via renderer.renderObject().
super( LensflareMesh.Geometry, new MeshBasicNodeMaterial( { opacity: 0, transparent: true } ) );
/**
* This flag can be used for type testing.
*
* @type {boolean}
* @readonly
* @default true
*/
this.isLensflareMesh = true;
this.type = 'LensflareMesh';
/**
* Overwritten to disable view-frustum culling by default.
*
* @type {boolean}
* @default false
*/
this.frustumCulled = false;
/**
* Overwritten to make sure lensflares are rendered last.
*
* @type {number}
* @default Infinity
*/
this.renderOrder = Infinity;
//
const positionView = new Vector3();
// textures: 16x16 scratch buffers. tempMap saves the framebuffer region under
// the light source, occlusionMap stores the depth-tested occlusion probe.
const tempMap = new FramebufferTexture( 16, 16 );
const occlusionMap = new FramebufferTexture( 16, 16 );
// Texel type of the current render target; the scratch textures are
// re-created in onBeforeRender() whenever it changes.
let currentType = UnsignedByteType;
const geometry = LensflareMesh.Geometry;
// values for shared material uniforms
const sharedValues = {
scale: new Vector2(),
positionScreen: new Vector3()
};
// materials
const scale = reference( 'scale', 'vec2', sharedValues );
const screenPosition = reference( 'positionScreen', 'vec3', sharedValues );
const vertexNode = vec4( positionGeometry.xy.mul( scale ).add( screenPosition.xy ), screenPosition.z, 1.0 );
// material1a draws a depth-tested magenta quad at the light position; how
// much of it survives the depth test is the visibility measure.
const material1a = new NodeMaterial();
material1a.depthTest = true;
material1a.depthWrite = false;
material1a.transparent = false;
material1a.fog = false;
material1a.type = 'Lensflare-1a';
material1a.vertexNode = vertexNode;
material1a.colorNode = vec4( 1.0, 0.0, 1.0, 1.0 );
// material1b draws the saved framebuffer pixels (tempMap) back over the
// probe quad, restoring the original image. uv is flipped to match the
// orientation of the framebuffer copy.
const material1b = new NodeMaterial();
material1b.depthTest = false;
material1b.depthWrite = false;
material1b.transparent = false;
material1b.fog = false;
material1b.type = 'Lensflare-1b';
material1b.vertexNode = vertexNode;
material1b.colorNode = texture( tempMap, vec2( uv().flipY() ) );
// the following object is used for occlusionMap generation
const mesh1 = new Mesh( geometry, material1a );
//
const elements = [];
const elementMeshes = [];
// material2 renders each flare element, additively blended and modulated by
// the occlusion probe result computed in the vertex stage below.
const material2 = new NodeMaterial();
material2.transparent = true;
material2.blending = AdditiveBlending;
material2.depthWrite = false;
material2.depthTest = false;
material2.fog = false;
material2.type = 'Lensflare-2';
material2.screenPosition = new Vector3();
material2.scale = new Vector2();
material2.occlusionMap = occlusionMap;
material2.vertexNode = Fn( ( { material } ) => {
const scale = materialReference( 'scale', 'vec2' );
const screenPosition = materialReference( 'screenPosition', 'vec3' );
const occlusionMap = material.occlusionMap;
const pos = positionGeometry.xy.toVar();
// Sample the 16x16 occlusion map at nine fixed texels (corners, edge
// midpoints, center) and accumulate the result.
const visibility = textureLoad( occlusionMap, ivec2( 2, 2 ) ).toVar();
visibility.addAssign( textureLoad( occlusionMap, ivec2( 8, 2 ) ) );
visibility.addAssign( textureLoad( occlusionMap, ivec2( 14, 2 ) ) );
visibility.addAssign( textureLoad( occlusionMap, ivec2( 14, 8 ) ) );
visibility.addAssign( textureLoad( occlusionMap, ivec2( 14, 14 ) ) );
visibility.addAssign( textureLoad( occlusionMap, ivec2( 8, 14 ) ) );
visibility.addAssign( textureLoad( occlusionMap, ivec2( 2, 14 ) ) );
visibility.addAssign( textureLoad( occlusionMap, ivec2( 2, 8 ) ) );
visibility.addAssign( textureLoad( occlusionMap, ivec2( 8, 8 ) ) );
// Derive a scalar visibility factor from the averaged probe samples.
const vVisibility = varyingProperty( 'float', 'vVisibility' );
vVisibility.assign( visibility.r.div( 9.0 ) );
vVisibility.mulAssign( visibility.g.div( 9.0 ).oneMinus() );
vVisibility.mulAssign( visibility.b.div( 9.0 ) );
return vec4( ( pos.mul( scale ).add( screenPosition.xy ).xy ), screenPosition.z, 1.0 );
} )();
material2.colorNode = Fn( () => {
const color = reference( 'color', 'color' );
const map = reference( 'map', 'texture' );
const vVisibility = varyingProperty( 'float', 'vVisibility' );
// Modulate the flare texture's alpha by visibility and tint it.
const output = map.toVar();
output.a.mulAssign( vVisibility );
output.rgb.mulAssign( color );
return output;
} )();
/**
* Adds the given lensflare element to this instance.
*
* @param {LensflareElement} element - The element to add.
*/
this.addElement = function ( element ) {
elements.push( element );
};
//
const positionScreen = sharedValues.positionScreen;
const screenPositionPixels = new Vector4( 0, 0, 16, 16 );
const validArea = new Box2();
const viewport = new Vector4();
// dummy node for renderer.renderObject()
const lightsNode = new Node();
this.onBeforeRender = ( renderer, scene, camera ) => {
renderer.getViewport( viewport );
// Convert to physical pixels. NOTE(review): assumes the drawing buffer is
// scaled by window.devicePixelRatio — confirm against the renderer setup.
viewport.multiplyScalar( window.devicePixelRatio );
// Re-create the scratch textures if the render target's texel type changed.
const renderTarget = renderer.getRenderTarget();
const type = ( renderTarget !== null ) ? renderTarget.texture.type : UnsignedByteType;
if ( currentType !== type ) {
tempMap.dispose();
occlusionMap.dispose();
tempMap.type = occlusionMap.type = type;
currentType = type;
}
const invAspect = viewport.w / viewport.z;
const halfViewportWidth = viewport.z / 2.0;
const halfViewportHeight = viewport.w / 2.0;
// NDC size of the 16-pixel probe quad.
const size = 16 / viewport.w;
sharedValues.scale.set( size * invAspect, size );
// Area in which a full 16x16 pixel copy stays inside the viewport.
validArea.min.set( viewport.x, viewport.y );
validArea.max.set( viewport.x + ( viewport.z - 16 ), viewport.y + ( viewport.w - 16 ) );
// calculate position in screen space
positionView.setFromMatrixPosition( this.matrixWorld );
positionView.applyMatrix4( camera.matrixWorldInverse );
if ( positionView.z > 0 ) return; // lensflare is behind the camera
positionScreen.copy( positionView ).applyMatrix4( camera.projectionMatrix );
// horizontal and vertical coordinate of the lower left corner of the pixels to copy
screenPositionPixels.x = viewport.x + ( positionScreen.x * halfViewportWidth ) + halfViewportWidth - 8;
screenPositionPixels.y = viewport.y - ( positionScreen.y * halfViewportHeight ) + halfViewportHeight - 8;
// screen cull
if ( validArea.containsPoint( screenPositionPixels ) ) {
// save current RGB to temp texture
renderer.copyFramebufferToTexture( tempMap, screenPositionPixels );
// render pink quad
renderer.renderObject( mesh1, scene, camera, geometry, material1a, null, lightsNode );
// copy result to occlusionMap
renderer.copyFramebufferToTexture( occlusionMap, screenPositionPixels );
// restore graphics
renderer.renderObject( mesh1, scene, camera, geometry, material1b, null, lightsNode );
// render elements
// Elements are placed along the line from the light through screen center;
// distance 0 is at the light, distance 1 mirrors it across the center.
const vecX = - positionScreen.x * 2;
const vecY = - positionScreen.y * 2;
for ( let i = 0, l = elements.length; i < l; i ++ ) {
const element = elements[ i ];
// Element meshes are created lazily and cached per element index.
let mesh2 = elementMeshes[ i ];
if ( mesh2 === undefined ) {
mesh2 = elementMeshes[ i ] = new Mesh( geometry, material2 );
mesh2.color = element.color.convertSRGBToLinear();
mesh2.map = element.texture;
}
material2.screenPosition.x = positionScreen.x + vecX * element.distance;
material2.screenPosition.y = positionScreen.y - vecY * element.distance;
material2.screenPosition.z = positionScreen.z;
const size = element.size / viewport.w;
material2.scale.set( size * invAspect, size );
renderer.renderObject( mesh2, scene, camera, geometry, material2, null, lightsNode );
}
}
};
/**
* Frees the GPU-related resources allocated by this instance. Call this
* method whenever this instance is no longer used in your app.
*/
this.dispose = function () {
material1a.dispose();
material1b.dispose();
material2.dispose();
tempMap.dispose();
occlusionMap.dispose();
for ( let i = 0, l = elements.length; i < l; i ++ ) {
elements[ i ].texture.dispose();
}
};
}
}
//
// Data holder describing a single flare of a LensflareMesh.
class LensflareElement {

	/**
	 * @param {Texture} texture - The flare's texture.
	 * @param {number} [size=1] - The size in pixels.
	 * @param {number} [distance=0] - Normalized distance (`[0,1]`) from the light source.
	 * @param {Color} [color] - The flare's color.
	 */
	constructor( texture, size = 1, distance = 0, color = new Color( 0xffffff ) ) {

		Object.assign( this, { texture, size, distance, color } );

	}

}
// Shared unit-quad geometry (two indexed triangles) with interleaved
// position (3 floats) and uv (2 floats) data, reused by all LensflareMesh draws.
LensflareMesh.Geometry = ( function () {

	const interleavedBuffer = new InterleavedBuffer( new Float32Array( [
		- 1, - 1, 0, 0, 0,
		1, - 1, 0, 1, 0,
		1, 1, 0, 1, 1,
		- 1, 1, 0, 0, 1
	] ), 5 );

	const quad = new BufferGeometry();
	quad.setIndex( [ 0, 1, 2, 0, 2, 3 ] );
	quad.setAttribute( 'position', new InterleavedBufferAttribute( interleavedBuffer, 3, 0, false ) );
	quad.setAttribute( 'uv', new InterleavedBufferAttribute( interleavedBuffer, 2, 3, false ) );

	return quad;

} )();
export { LensflareMesh, LensflareElement };

1261
node_modules/three/examples/jsm/objects/MarchingCubes.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

338
node_modules/three/examples/jsm/objects/Reflector.js generated vendored Normal file
View File

@@ -0,0 +1,338 @@
import {
Color,
Matrix4,
Mesh,
PerspectiveCamera,
Plane,
ShaderMaterial,
UniformsUtils,
Vector3,
Vector4,
WebGLRenderTarget,
HalfFloatType
} from 'three';
/**
* Can be used to create a flat, reflective surface like a mirror.
*
* Note that this class can only be used with {@link WebGLRenderer}.
* When using {@link WebGPURenderer}, use {@link ReflectorNode}.
*
* ```js
* const geometry = new THREE.PlaneGeometry( 100, 100 );
*
* const reflector = new Reflector( geometry, {
* clipBias: 0.003,
* textureWidth: window.innerWidth * window.devicePixelRatio,
* textureHeight: window.innerHeight * window.devicePixelRatio,
* color: 0xc1cbcb
* } );
*
* scene.add( reflector );
* ```
*
* @augments Mesh
* @three_import import { Reflector } from 'three/addons/objects/Reflector.js';
*/
class Reflector extends Mesh {
/**
* Constructs a new reflector.
*
* @param {BufferGeometry} geometry - The reflector's geometry.
* @param {Reflector~Options} [options] - The configuration options.
*/
constructor( geometry, options = {} ) {
super( geometry );
/**
* This flag can be used for type testing.
*
* @type {boolean}
* @readonly
* @default true
*/
this.isReflector = true;
this.type = 'Reflector';
/**
* Whether to force an update, no matter if the reflector
* is in view or not.
*
* @type {boolean}
* @default false
*/
this.forceUpdate = false;
/**
* The reflector's virtual camera. This is used to render
* the scene from the mirror's point of view.
*
* @type {PerspectiveCamera}
*/
this.camera = new PerspectiveCamera();
const scope = this;
const color = ( options.color !== undefined ) ? new Color( options.color ) : new Color( 0x7F7F7F );
const textureWidth = options.textureWidth || 512;
const textureHeight = options.textureHeight || 512;
const clipBias = options.clipBias || 0;
const shader = options.shader || Reflector.ReflectorShader;
const multisample = ( options.multisample !== undefined ) ? options.multisample : 4;
// scratch objects reused across onBeforeRender() calls
const reflectorPlane = new Plane();
const normal = new Vector3();
const reflectorWorldPosition = new Vector3();
const cameraWorldPosition = new Vector3();
const rotationMatrix = new Matrix4();
const lookAtPosition = new Vector3( 0, 0, - 1 );
const clipPlane = new Vector4();
const view = new Vector3();
const target = new Vector3();
const q = new Vector4();
const textureMatrix = new Matrix4();
const virtualCamera = this.camera;
const renderTarget = new WebGLRenderTarget( textureWidth, textureHeight, { samples: multisample, type: HalfFloatType } );
const material = new ShaderMaterial( {
name: ( shader.name !== undefined ) ? shader.name : 'unspecified',
uniforms: UniformsUtils.clone( shader.uniforms ),
fragmentShader: shader.fragmentShader,
vertexShader: shader.vertexShader
} );
material.uniforms[ 'tDiffuse' ].value = renderTarget.texture;
material.uniforms[ 'color' ].value = color;
material.uniforms[ 'textureMatrix' ].value = textureMatrix;
this.material = material;
this.onBeforeRender = function ( renderer, scene, camera ) {
reflectorWorldPosition.setFromMatrixPosition( scope.matrixWorld );
cameraWorldPosition.setFromMatrixPosition( camera.matrixWorld );
// world-space normal of the reflector (local +z rotated into world space)
rotationMatrix.extractRotation( scope.matrixWorld );
normal.set( 0, 0, 1 );
normal.applyMatrix4( rotationMatrix );
view.subVectors( reflectorWorldPosition, cameraWorldPosition );
// Avoid rendering when reflector is facing away unless forcing an update
const isFacingAway = view.dot( normal ) > 0;
if ( isFacingAway === true && this.forceUpdate === false ) return;
// mirror the camera position across the reflector plane
view.reflect( normal ).negate();
view.add( reflectorWorldPosition );
rotationMatrix.extractRotation( camera.matrixWorld );
// mirror the camera's look-at target across the reflector plane
lookAtPosition.set( 0, 0, - 1 );
lookAtPosition.applyMatrix4( rotationMatrix );
lookAtPosition.add( cameraWorldPosition );
target.subVectors( reflectorWorldPosition, lookAtPosition );
target.reflect( normal ).negate();
target.add( reflectorWorldPosition );
virtualCamera.position.copy( view );
virtualCamera.up.set( 0, 1, 0 );
virtualCamera.up.applyMatrix4( rotationMatrix );
virtualCamera.up.reflect( normal );
virtualCamera.lookAt( target );
virtualCamera.far = camera.far; // Used in WebGLBackground
virtualCamera.updateMatrixWorld();
virtualCamera.projectionMatrix.copy( camera.projectionMatrix );
// Update the texture matrix
// (scale/bias matrix maps clip space [-1,1] into uv space [0,1])
textureMatrix.set(
0.5, 0.0, 0.0, 0.5,
0.0, 0.5, 0.0, 0.5,
0.0, 0.0, 0.5, 0.5,
0.0, 0.0, 0.0, 1.0
);
textureMatrix.multiply( virtualCamera.projectionMatrix );
textureMatrix.multiply( virtualCamera.matrixWorldInverse );
textureMatrix.multiply( scope.matrixWorld );
// Now update projection matrix with new clip plane, implementing code from: http://www.terathon.com/code/oblique.html
// Paper explaining this technique: http://www.terathon.com/lengyel/Lengyel-Oblique.pdf
reflectorPlane.setFromNormalAndCoplanarPoint( normal, reflectorWorldPosition );
reflectorPlane.applyMatrix4( virtualCamera.matrixWorldInverse );
clipPlane.set( reflectorPlane.normal.x, reflectorPlane.normal.y, reflectorPlane.normal.z, reflectorPlane.constant );
const projectionMatrix = virtualCamera.projectionMatrix;
q.x = ( Math.sign( clipPlane.x ) + projectionMatrix.elements[ 8 ] ) / projectionMatrix.elements[ 0 ];
q.y = ( Math.sign( clipPlane.y ) + projectionMatrix.elements[ 9 ] ) / projectionMatrix.elements[ 5 ];
q.z = - 1.0;
q.w = ( 1.0 + projectionMatrix.elements[ 10 ] ) / projectionMatrix.elements[ 14 ];
// Calculate the scaled plane vector
clipPlane.multiplyScalar( 2.0 / clipPlane.dot( q ) );
// Replacing the third row of the projection matrix
projectionMatrix.elements[ 2 ] = clipPlane.x;
projectionMatrix.elements[ 6 ] = clipPlane.y;
projectionMatrix.elements[ 10 ] = clipPlane.z + 1.0 - clipBias;
projectionMatrix.elements[ 14 ] = clipPlane.w;
// Render
scope.visible = false;
// save renderer state that is modified below
const currentRenderTarget = renderer.getRenderTarget();
const currentXrEnabled = renderer.xr.enabled;
const currentShadowAutoUpdate = renderer.shadowMap.autoUpdate;
renderer.xr.enabled = false; // Avoid camera modification
renderer.shadowMap.autoUpdate = false; // Avoid re-computing shadows
renderer.setRenderTarget( renderTarget );
renderer.state.buffers.depth.setMask( true ); // make sure the depth buffer is writable so it can be properly cleared, see #18897
if ( renderer.autoClear === false ) renderer.clear();
renderer.render( scene, virtualCamera );
renderer.xr.enabled = currentXrEnabled;
renderer.shadowMap.autoUpdate = currentShadowAutoUpdate;
renderer.setRenderTarget( currentRenderTarget );
// Restore viewport
const viewport = camera.viewport;
if ( viewport !== undefined ) {
renderer.state.viewport( viewport );
}
scope.visible = true;
this.forceUpdate = false;
};
/**
* Returns the reflector's internal render target.
*
* @return {WebGLRenderTarget} The internal render target
*/
this.getRenderTarget = function () {
return renderTarget;
};
/**
* Frees the GPU-related resources allocated by this instance. Call this
* method whenever this instance is no longer used in your app.
*/
this.dispose = function () {
renderTarget.dispose();
scope.material.dispose();
};
}
}
// Default shader for Reflector: projects the mirrored-scene texture onto the
// reflector's surface via textureMatrix (projective texturing) and blends it
// with the reflector's base color using an overlay blend.
Reflector.ReflectorShader = {
name: 'ReflectorShader',
uniforms: {
'color': {
value: null
},
'tDiffuse': {
value: null
},
'textureMatrix': {
value: null
}
},
vertexShader: /* glsl */`
uniform mat4 textureMatrix;
varying vec4 vUv;
#include <common>
#include <logdepthbuf_pars_vertex>
void main() {
vUv = textureMatrix * vec4( position, 1.0 );
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
#include <logdepthbuf_vertex>
}`,
fragmentShader: /* glsl */`
uniform vec3 color;
uniform sampler2D tDiffuse;
varying vec4 vUv;
#include <logdepthbuf_pars_fragment>
float blendOverlay( float base, float blend ) {
return( base < 0.5 ? ( 2.0 * base * blend ) : ( 1.0 - 2.0 * ( 1.0 - base ) * ( 1.0 - blend ) ) );
}
vec3 blendOverlay( vec3 base, vec3 blend ) {
return vec3( blendOverlay( base.r, blend.r ), blendOverlay( base.g, blend.g ), blendOverlay( base.b, blend.b ) );
}
void main() {
#include <logdepthbuf_fragment>
vec4 base = texture2DProj( tDiffuse, vUv );
gl_FragColor = vec4( blendOverlay( base.rgb, color ), 1.0 );
#include <tonemapping_fragment>
#include <colorspace_fragment>
}`
};
/**
* Constructor options of `Reflector`.
*
* @typedef {Object} Reflector~Options
* @property {number|Color|string} [color=0x7F7F7F] - The reflector's color.
* @property {number} [textureWidth=512] - The texture width. A higher value results in more clear reflections but is also more expensive.
* @property {number} [textureHeight=512] - The texture height. A higher value results in more clear reflections but is also more expensive.
* @property {number} [clipBias=0] - The clip bias.
* @property {Object} [shader] - Can be used to pass in a custom shader that defines how the reflective view is projected onto the reflector's geometry.
* @property {number} [multisample=4] - How many samples to use for MSAA. `0` disables MSAA.
**/
export { Reflector };

View File

@@ -0,0 +1,392 @@
import {
Color,
Matrix4,
Mesh,
PerspectiveCamera,
ShaderMaterial,
UniformsUtils,
Vector2,
Vector3,
WebGLRenderTarget,
DepthTexture,
UnsignedShortType,
NearestFilter,
Plane,
HalfFloatType
} from 'three';
/**
* A special version of {@link Reflector} for usage with {@link SSRPass}.
*
* @augments Mesh
* @three_import import { ReflectorForSSRPass } from 'three/addons/objects/ReflectorForSSRPass.js';
*/
class ReflectorForSSRPass extends Mesh {

	/**
	 * Constructs a new reflector.
	 *
	 * @param {BufferGeometry} geometry - The reflector's geometry.
	 * @param {ReflectorForSSRPass~Options} [options] - The configuration options.
	 */
	constructor( geometry, options = {} ) {

		super( geometry );

		// Type-testing flag, see `isReflectorForSSRPass` checks elsewhere.
		this.isReflectorForSSRPass = true;

		this.type = 'ReflectorForSSRPass';

		// Captured so the closures below (doRender, dispose, ...) can reference the instance.
		const scope = this;

		// Resolve options with defaults.
		const color = ( options.color !== undefined ) ? new Color( options.color ) : new Color( 0x7F7F7F );
		const textureWidth = options.textureWidth || 512;
		const textureHeight = options.textureHeight || 512;
		const clipBias = options.clipBias || 0;
		const shader = options.shader || ReflectorForSSRPass.ReflectorShader;
		const useDepthTexture = options.useDepthTexture === true; // defaults to false

		// World up axis; used below to compute the Fresnel coefficient from the camera direction.
		const yAxis = new Vector3( 0, 1, 0 );
		const vecTemp0 = new Vector3();
		const vecTemp1 = new Vector3();

		//

		scope.needsUpdate = false;

		// Public tunables, seeded from the shared shader defaults and pushed
		// into the material uniforms on every doRender() call.
		scope.maxDistance = ReflectorForSSRPass.ReflectorShader.uniforms.maxDistance.value;
		scope.opacity = ReflectorForSSRPass.ReflectorShader.uniforms.opacity.value;
		scope.color = color;
		scope.resolution = options.resolution || new Vector2( window.innerWidth, window.innerHeight );

		// `distanceAttenuation` / `fresnel` toggle shader defines, so flipping
		// them triggers a material recompile via `needsUpdate`.
		scope._distanceAttenuation = ReflectorForSSRPass.ReflectorShader.defines.DISTANCE_ATTENUATION;
		Object.defineProperty( scope, 'distanceAttenuation', {
			get() {
				return scope._distanceAttenuation;
			},
			set( val ) {
				if ( scope._distanceAttenuation === val ) return;
				scope._distanceAttenuation = val;
				scope.material.defines.DISTANCE_ATTENUATION = val;
				scope.material.needsUpdate = true;
			}
		} );

		scope._fresnel = ReflectorForSSRPass.ReflectorShader.defines.FRESNEL;
		Object.defineProperty( scope, 'fresnel', {
			get() {
				return scope._fresnel;
			},
			set( val ) {
				if ( scope._fresnel === val ) return;
				scope._fresnel = val;
				scope.material.defines.FRESNEL = val;
				scope.material.needsUpdate = true;
			}
		} );

		// Scratch objects reused every frame by doRender() to avoid allocations.
		const normal = new Vector3();
		const reflectorWorldPosition = new Vector3();
		const cameraWorldPosition = new Vector3();
		const rotationMatrix = new Matrix4();
		const lookAtPosition = new Vector3( 0, 0, - 1 );
		const view = new Vector3();
		const target = new Vector3();
		const textureMatrix = new Matrix4();
		const virtualCamera = new PerspectiveCamera();

		let depthTexture;

		if ( useDepthTexture ) {

			depthTexture = new DepthTexture();
			depthTexture.type = UnsignedShortType;
			depthTexture.minFilter = NearestFilter;
			depthTexture.magFilter = NearestFilter;

		}

		const parameters = {
			depthTexture: useDepthTexture ? depthTexture : null,
			type: HalfFloatType
		};

		const renderTarget = new WebGLRenderTarget( textureWidth, textureHeight, parameters );

		const material = new ShaderMaterial( {
			name: ( shader.name !== undefined ) ? shader.name : 'unspecified',
			transparent: useDepthTexture,
			defines: Object.assign( {}, ReflectorForSSRPass.ReflectorShader.defines, {
				useDepthTexture
			} ),
			uniforms: UniformsUtils.clone( shader.uniforms ),
			fragmentShader: shader.fragmentShader,
			vertexShader: shader.vertexShader
		} );
		material.uniforms[ 'tDiffuse' ].value = renderTarget.texture;
		material.uniforms[ 'color' ].value = scope.color;
		material.uniforms[ 'textureMatrix' ].value = textureMatrix;
		if ( useDepthTexture ) {

			material.uniforms[ 'tDepth' ].value = renderTarget.depthTexture;

		}

		this.material = material;

		// Clip everything below the reflector plane while rendering the mirrored scene.
		const globalPlane = new Plane( new Vector3( 0, 1, 0 ), clipBias );
		const globalPlanes = [ globalPlane ];

		/**
		 * Renders the mirrored scene into the internal render target.
		 * Invoked by SSRPass; not called automatically by the renderer.
		 */
		this.doRender = function ( renderer, scene, camera ) {

			// Push current instance state into the uniforms.
			material.uniforms[ 'maxDistance' ].value = scope.maxDistance;
			material.uniforms[ 'color' ].value = scope.color;
			material.uniforms[ 'opacity' ].value = scope.opacity;

			// Approximate a single Fresnel coefficient from the camera direction
			// reflected about the world up axis.
			vecTemp0.copy( camera.position ).normalize();
			vecTemp1.copy( vecTemp0 ).reflect( yAxis );
			material.uniforms[ 'fresnelCoe' ].value = ( vecTemp0.dot( vecTemp1 ) + 1. ) / 2.; // TODO: Also need to use glsl viewPosition and viewNormal per pixel.

			reflectorWorldPosition.setFromMatrixPosition( scope.matrixWorld );
			cameraWorldPosition.setFromMatrixPosition( camera.matrixWorld );

			rotationMatrix.extractRotation( scope.matrixWorld );

			// Reflector plane normal in world space (local +Z rotated into world).
			normal.set( 0, 0, 1 );
			normal.applyMatrix4( rotationMatrix );

			view.subVectors( reflectorWorldPosition, cameraWorldPosition );

			// Avoid rendering when reflector is facing away
			if ( view.dot( normal ) > 0 ) return;

			// Mirror the camera position across the reflector plane.
			view.reflect( normal ).negate();
			view.add( reflectorWorldPosition );

			rotationMatrix.extractRotation( camera.matrixWorld );

			// Mirror the camera's look-at target as well.
			lookAtPosition.set( 0, 0, - 1 );
			lookAtPosition.applyMatrix4( rotationMatrix );
			lookAtPosition.add( cameraWorldPosition );

			target.subVectors( reflectorWorldPosition, lookAtPosition );
			target.reflect( normal ).negate();
			target.add( reflectorWorldPosition );

			virtualCamera.position.copy( view );
			virtualCamera.up.set( 0, 1, 0 );
			virtualCamera.up.applyMatrix4( rotationMatrix );
			virtualCamera.up.reflect( normal );
			virtualCamera.lookAt( target );

			virtualCamera.far = camera.far; // Used in WebGLBackground

			virtualCamera.updateMatrixWorld();
			virtualCamera.projectionMatrix.copy( camera.projectionMatrix );

			material.uniforms[ 'virtualCameraNear' ].value = camera.near;
			material.uniforms[ 'virtualCameraFar' ].value = camera.far;
			material.uniforms[ 'virtualCameraMatrixWorld' ].value = virtualCamera.matrixWorld;
			material.uniforms[ 'virtualCameraProjectionMatrix' ].value = camera.projectionMatrix;
			material.uniforms[ 'virtualCameraProjectionMatrixInverse' ].value = camera.projectionMatrixInverse;
			material.uniforms[ 'resolution' ].value = scope.resolution;

			// Update the texture matrix
			textureMatrix.set(
				0.5, 0.0, 0.0, 0.5,
				0.0, 0.5, 0.0, 0.5,
				0.0, 0.0, 0.5, 0.5,
				0.0, 0.0, 0.0, 1.0
			);
			textureMatrix.multiply( virtualCamera.projectionMatrix );
			textureMatrix.multiply( virtualCamera.matrixWorldInverse );
			textureMatrix.multiply( scope.matrixWorld );

			// scope.visible = false;

			// Save renderer state, render the mirrored scene, then restore.
			const currentRenderTarget = renderer.getRenderTarget();

			const currentXrEnabled = renderer.xr.enabled;
			const currentShadowAutoUpdate = renderer.shadowMap.autoUpdate;
			const currentClippingPlanes = renderer.clippingPlanes;

			renderer.xr.enabled = false; // Avoid camera modification
			renderer.shadowMap.autoUpdate = false; // Avoid re-computing shadows
			renderer.clippingPlanes = globalPlanes;

			renderer.setRenderTarget( renderTarget );

			renderer.state.buffers.depth.setMask( true ); // make sure the depth buffer is writable so it can be properly cleared, see #18897

			if ( renderer.autoClear === false ) renderer.clear();
			renderer.render( scene, virtualCamera );

			renderer.xr.enabled = currentXrEnabled;
			renderer.shadowMap.autoUpdate = currentShadowAutoUpdate;
			renderer.clippingPlanes = currentClippingPlanes;

			renderer.setRenderTarget( currentRenderTarget );

			// Restore viewport

			const viewport = camera.viewport;

			if ( viewport !== undefined ) {

				renderer.state.viewport( viewport );

			}

			// scope.visible = true;

		};

		/**
		 * Returns the reflector's internal render target.
		 *
		 * @return {WebGLRenderTarget} The internal render target
		 */
		this.getRenderTarget = function () {

			return renderTarget;

		};

		/**
		 * Frees the GPU-related resources allocated by this instance. Call this
		 * method whenever this instance is no longer used in your app.
		 */
		this.dispose = function () {

			renderTarget.dispose();
			scope.material.dispose();

		};

	}

}
// Shared shader definition; also the source of the per-instance defaults
// (`maxDistance`, `opacity`, defines) read by the constructor above.
ReflectorForSSRPass.ReflectorShader = {

	name: 'ReflectorShader',

	defines: {
		DISTANCE_ATTENUATION: true,
		FRESNEL: true,
	},

	uniforms: {

		color: { value: null },
		tDiffuse: { value: null },
		tDepth: { value: null },
		textureMatrix: { value: new Matrix4() },
		maxDistance: { value: 180 },
		opacity: { value: 0.5 },
		fresnelCoe: { value: null },
		virtualCameraNear: { value: null },
		virtualCameraFar: { value: null },
		virtualCameraProjectionMatrix: { value: new Matrix4() },
		virtualCameraMatrixWorld: { value: new Matrix4() },
		virtualCameraProjectionMatrixInverse: { value: new Matrix4() },
		resolution: { value: new Vector2() },

	},

	vertexShader: /* glsl */`
		uniform mat4 textureMatrix;
		varying vec4 vUv;

		void main() {

			vUv = textureMatrix * vec4( position, 1.0 );

			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

		}`,

	fragmentShader: /* glsl */`
		uniform vec3 color;
		uniform sampler2D tDiffuse;
		uniform sampler2D tDepth;
		uniform float maxDistance;
		uniform float opacity;
		uniform float fresnelCoe;

		uniform float virtualCameraNear;
		uniform float virtualCameraFar;
		uniform mat4 virtualCameraProjectionMatrix;
		uniform mat4 virtualCameraProjectionMatrixInverse;
		uniform mat4 virtualCameraMatrixWorld;
		uniform vec2 resolution;

		varying vec4 vUv;

		#include <packing>

		float blendOverlay( float base, float blend ) {

			return( base < 0.5 ? ( 2.0 * base * blend ) : ( 1.0 - 2.0 * ( 1.0 - base ) * ( 1.0 - blend ) ) );

		}

		vec3 blendOverlay( vec3 base, vec3 blend ) {

			return vec3( blendOverlay( base.r, blend.r ), blendOverlay( base.g, blend.g ), blendOverlay( base.b, blend.b ) );

		}

		float getDepth( const in vec2 uv ) {

			return texture2D( tDepth, uv ).x;

		}

		float getViewZ( const in float depth ) {

			return perspectiveDepthToViewZ( depth, virtualCameraNear, virtualCameraFar );

		}

		vec3 getViewPosition( const in vec2 uv, const in float depth/*clip space*/, const in float clipW ) {

			vec4 clipPosition = vec4( ( vec3( uv, depth ) - 0.5 ) * 2.0, 1.0 );//ndc

			clipPosition *= clipW; //clip

			return ( virtualCameraProjectionMatrixInverse * clipPosition ).xyz;//view

		}

		void main() {

			vec4 base = texture2DProj( tDiffuse, vUv );

			#ifdef useDepthTexture

				vec2 uv=(gl_FragCoord.xy-.5)/resolution.xy;
				uv.x=1.-uv.x;
				float depth = texture2DProj( tDepth, vUv ).r;
				float viewZ = getViewZ( depth );
				float clipW = virtualCameraProjectionMatrix[2][3] * viewZ+virtualCameraProjectionMatrix[3][3];
				vec3 viewPosition=getViewPosition( uv, depth, clipW );
				vec3 worldPosition=(virtualCameraMatrixWorld*vec4(viewPosition,1)).xyz;

				if(worldPosition.y>maxDistance) discard;

				float op=opacity;

				#ifdef DISTANCE_ATTENUATION
					float ratio=1.-(worldPosition.y/maxDistance);
					float attenuation=ratio*ratio;
					op=opacity*attenuation;
				#endif

				#ifdef FRESNEL
					op*=fresnelCoe;
				#endif

				gl_FragColor = vec4( blendOverlay( base.rgb, color ), op );

			#else

				gl_FragColor = vec4( blendOverlay( base.rgb, color ), 1.0 );

			#endif

		}
	`,
};

/**
 * Constructor options of `ReflectorForSSRPass`.
 *
 * @typedef {Object} ReflectorForSSRPass~Options
 * @property {number|Color|string} [color=0x7F7F7F] - The reflector's color.
 * @property {number} [textureWidth=512] - The texture width. A higher value results in more clear reflections but is also more expensive.
 * @property {number} [textureHeight=512] - The texture height. A higher value results in more clear reflections but is also more expensive.
 * @property {number} [clipBias=0] - The clip bias.
 * @property {Object} [shader] - Can be used to pass in a custom shader that defines how the reflective view is projected onto the reflector's geometry.
 * @property {boolean} [useDepthTexture=false] - Whether to store depth values in a texture or not. The constructor only enables this when the option is strictly `true`.
 * @property {Vector2} [resolution] - The render resolution. Falls back to the window dimensions when omitted.
 **/

export { ReflectorForSSRPass };

389
node_modules/three/examples/jsm/objects/Refractor.js generated vendored Normal file
View File

@@ -0,0 +1,389 @@
import {
Color,
Matrix4,
Mesh,
PerspectiveCamera,
Plane,
Quaternion,
ShaderMaterial,
UniformsUtils,
Vector3,
Vector4,
WebGLRenderTarget,
HalfFloatType
} from 'three';
/**
* Can be used to create a flat, refractive surface like for special
* windows or water effects.
*
* Note that this class can only be used with {@link WebGLRenderer}.
* When using {@link WebGPURenderer}, use {@link viewportSharedTexture}.
*
* ```js
* const geometry = new THREE.PlaneGeometry( 100, 100 );
*
 * const refractor = new Refractor( geometry, {
* color: 0xcbcbcb,
* textureWidth: 1024,
* textureHeight: 1024
* } );
*
* scene.add( refractor );
* ```
*
* @augments Mesh
* @three_import import { Refractor } from 'three/addons/objects/Refractor.js';
*/
class Refractor extends Mesh {

	/**
	 * Constructs a new refractor.
	 *
	 * @param {BufferGeometry} geometry - The refractor's geometry.
	 * @param {Refractor~Options} [options] - The configuration options.
	 */
	constructor( geometry, options = {} ) {

		super( geometry );

		/**
		 * This flag can be used for type testing.
		 *
		 * @type {boolean}
		 * @readonly
		 * @default true
		 */
		this.isRefractor = true;

		this.type = 'Refractor';

		/**
		 * The refractor's virtual camera. It renders the scene from behind the
		 * refractor plane into the internal render target.
		 *
		 * @type {PerspectiveCamera}
		 */
		this.camera = new PerspectiveCamera();

		const scope = this;

		// Resolve options with defaults.
		const color = ( options.color !== undefined ) ? new Color( options.color ) : new Color( 0x7F7F7F );
		const textureWidth = options.textureWidth || 512;
		const textureHeight = options.textureHeight || 512;
		const clipBias = options.clipBias || 0;
		const shader = options.shader || Refractor.RefractorShader;
		const multisample = ( options.multisample !== undefined ) ? options.multisample : 4;

		//

		const virtualCamera = this.camera;
		virtualCamera.matrixAutoUpdate = false;
		// Marker read in onBeforeRender() to avoid recursive refractor rendering.
		virtualCamera.userData.refractor = true;

		//

		const refractorPlane = new Plane();
		const textureMatrix = new Matrix4();

		// render target

		const renderTarget = new WebGLRenderTarget( textureWidth, textureHeight, { samples: multisample, type: HalfFloatType } );

		// material

		this.material = new ShaderMaterial( {
			name: ( shader.name !== undefined ) ? shader.name : 'unspecified',
			uniforms: UniformsUtils.clone( shader.uniforms ),
			vertexShader: shader.vertexShader,
			fragmentShader: shader.fragmentShader,
			transparent: true // ensures, refractors are drawn from farthest to closest
		} );

		this.material.uniforms[ 'color' ].value = color;
		this.material.uniforms[ 'tDiffuse' ].value = renderTarget.texture;
		this.material.uniforms[ 'textureMatrix' ].value = textureMatrix;

		// functions

		// Returns true when the camera faces the refractor's front side (local +Z).
		const visible = ( function () {

			const refractorWorldPosition = new Vector3();
			const cameraWorldPosition = new Vector3();
			const rotationMatrix = new Matrix4();

			const view = new Vector3();
			const normal = new Vector3();

			return function visible( camera ) {

				refractorWorldPosition.setFromMatrixPosition( scope.matrixWorld );
				cameraWorldPosition.setFromMatrixPosition( camera.matrixWorld );

				view.subVectors( refractorWorldPosition, cameraWorldPosition );

				rotationMatrix.extractRotation( scope.matrixWorld );

				normal.set( 0, 0, 1 );
				normal.applyMatrix4( rotationMatrix );

				return view.dot( normal ) < 0;

			};

		} )();

		// Recomputes the world-space clipping plane from the refractor's transform.
		const updateRefractorPlane = ( function () {

			const normal = new Vector3();
			const position = new Vector3();
			const quaternion = new Quaternion();
			const scale = new Vector3();

			return function updateRefractorPlane() {

				scope.matrixWorld.decompose( position, quaternion, scale );
				normal.set( 0, 0, 1 ).applyQuaternion( quaternion ).normalize();

				// flip the normal because we want to cull everything above the plane

				normal.negate();

				refractorPlane.setFromNormalAndCoplanarPoint( normal, position );

			};

		} )();

		// Mirrors the scene camera and bends its projection matrix so the near
		// plane coincides with the refractor plane (oblique frustum clipping).
		const updateVirtualCamera = ( function () {

			const clipPlane = new Plane();
			const clipVector = new Vector4();
			const q = new Vector4();

			return function updateVirtualCamera( camera ) {

				virtualCamera.matrixWorld.copy( camera.matrixWorld );
				virtualCamera.matrixWorldInverse.copy( virtualCamera.matrixWorld ).invert();
				virtualCamera.projectionMatrix.copy( camera.projectionMatrix );
				virtualCamera.far = camera.far; // used in WebGLBackground

				// The following code creates an oblique view frustum for clipping.
				// see: Lengyel, Eric. “Oblique View Frustum Depth Projection and Clipping”.
				// Journal of Game Development, Vol. 1, No. 2 (2005), Charles River Media, pp. 516

				clipPlane.copy( refractorPlane );
				clipPlane.applyMatrix4( virtualCamera.matrixWorldInverse );

				clipVector.set( clipPlane.normal.x, clipPlane.normal.y, clipPlane.normal.z, clipPlane.constant );

				// calculate the clip-space corner point opposite the clipping plane and
				// transform it into camera space by multiplying it by the inverse of the projection matrix

				const projectionMatrix = virtualCamera.projectionMatrix;

				q.x = ( Math.sign( clipVector.x ) + projectionMatrix.elements[ 8 ] ) / projectionMatrix.elements[ 0 ];
				q.y = ( Math.sign( clipVector.y ) + projectionMatrix.elements[ 9 ] ) / projectionMatrix.elements[ 5 ];
				q.z = - 1.0;
				q.w = ( 1.0 + projectionMatrix.elements[ 10 ] ) / projectionMatrix.elements[ 14 ];

				// calculate the scaled plane vector

				clipVector.multiplyScalar( 2.0 / clipVector.dot( q ) );

				// replacing the third row of the projection matrix

				projectionMatrix.elements[ 2 ] = clipVector.x;
				projectionMatrix.elements[ 6 ] = clipVector.y;
				projectionMatrix.elements[ 10 ] = clipVector.z + 1.0 - clipBias;
				projectionMatrix.elements[ 14 ] = clipVector.w;

			};

		} )();

		// This will update the texture matrix that is used for projective texture mapping in the shader.
		// see: http://developer.download.nvidia.com/assets/gamedev/docs/projective_texture_mapping.pdf

		function updateTextureMatrix( camera ) {

			// this matrix does range mapping to [ 0, 1 ]

			textureMatrix.set(
				0.5, 0.0, 0.0, 0.5,
				0.0, 0.5, 0.0, 0.5,
				0.0, 0.0, 0.5, 0.5,
				0.0, 0.0, 0.0, 1.0
			);

			// we use "Object Linear Texgen", so we need to multiply the texture matrix T
			// (matrix above) with the projection and view matrix of the virtual camera
			// and the model matrix of the refractor

			textureMatrix.multiply( camera.projectionMatrix );
			textureMatrix.multiply( camera.matrixWorldInverse );
			textureMatrix.multiply( scope.matrixWorld );

		}

		//

		// Renders the refracted view into the render target, restoring all
		// touched renderer state afterwards.
		function render( renderer, scene, camera ) {

			// Hide the refractor itself so it does not occlude the refracted view.
			scope.visible = false;

			const currentRenderTarget = renderer.getRenderTarget();
			const currentXrEnabled = renderer.xr.enabled;
			const currentShadowAutoUpdate = renderer.shadowMap.autoUpdate;

			renderer.xr.enabled = false; // avoid camera modification
			renderer.shadowMap.autoUpdate = false; // avoid re-computing shadows

			renderer.setRenderTarget( renderTarget );
			if ( renderer.autoClear === false ) renderer.clear();
			renderer.render( scene, virtualCamera );

			renderer.xr.enabled = currentXrEnabled;
			renderer.shadowMap.autoUpdate = currentShadowAutoUpdate;
			renderer.setRenderTarget( currentRenderTarget );

			// restore viewport

			const viewport = camera.viewport;

			if ( viewport !== undefined ) {

				renderer.state.viewport( viewport );

			}

			scope.visible = true;

		}

		//

		this.onBeforeRender = function ( renderer, scene, camera ) {

			// ensure refractors are rendered only once per frame

			if ( camera.userData.refractor === true ) return;

			// avoid rendering when the refractor is viewed from behind

			if ( ! visible( camera ) === true ) return;

			// update

			updateRefractorPlane();

			updateTextureMatrix( camera );

			updateVirtualCamera( camera );

			render( renderer, scene, camera );

		};

		/**
		 * Returns the refractor's internal render target.
		 *
		 * @return {WebGLRenderTarget} The internal render target
		 */
		this.getRenderTarget = function () {

			return renderTarget;

		};

		/**
		 * Frees the GPU-related resources allocated by this instance. Call this
		 * method whenever this instance is no longer used in your app.
		 */
		this.dispose = function () {

			renderTarget.dispose();
			scope.material.dispose();

		};

	}

}
// Default shader: projects the refracted view texture onto the geometry and
// tints it with `color` using an overlay blend.
Refractor.RefractorShader = {

	name: 'RefractorShader',

	uniforms: {

		'color': {
			value: null
		},

		'tDiffuse': {
			value: null
		},

		'textureMatrix': {
			value: null
		}

	},

	vertexShader: /* glsl */`

		uniform mat4 textureMatrix;

		varying vec4 vUv;

		void main() {

			vUv = textureMatrix * vec4( position, 1.0 );

			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

		}`,

	fragmentShader: /* glsl */`

		uniform vec3 color;
		uniform sampler2D tDiffuse;

		varying vec4 vUv;

		float blendOverlay( float base, float blend ) {

			return( base < 0.5 ? ( 2.0 * base * blend ) : ( 1.0 - 2.0 * ( 1.0 - base ) * ( 1.0 - blend ) ) );

		}

		vec3 blendOverlay( vec3 base, vec3 blend ) {

			return vec3( blendOverlay( base.r, blend.r ), blendOverlay( base.g, blend.g ), blendOverlay( base.b, blend.b ) );

		}

		void main() {

			vec4 base = texture2DProj( tDiffuse, vUv );

			gl_FragColor = vec4( blendOverlay( base.rgb, color ), 1.0 );

			#include <tonemapping_fragment>
			#include <colorspace_fragment>

		}`

};

/**
 * Constructor options of `Refractor`.
 *
 * @typedef {Object} Refractor~Options
 * @property {number|Color|string} [color=0x7F7F7F] - The refractor's color.
 * @property {number} [textureWidth=512] - The texture width. A higher value results in more clear refractions but is also more expensive.
 * @property {number} [textureHeight=512] - The texture height. A higher value results in more clear refractions but is also more expensive.
 * @property {number} [clipBias=0] - The clip bias.
 * @property {Object} [shader] - Can be used to pass in a custom shader that defines how the refractive view is projected onto the refractor's geometry.
 * @property {number} [multisample=4] - How many samples to use for MSAA. `0` disables MSAA.
 **/

export { Refractor };

130
node_modules/three/examples/jsm/objects/ShadowMesh.js generated vendored Normal file
View File

@@ -0,0 +1,130 @@
import {
Matrix4,
Mesh,
MeshBasicMaterial,
EqualStencilFunc,
IncrementStencilOp
} from 'three';
const _shadowMatrix = new Matrix4();
/**
* A Shadow Mesh that follows a shadow-casting mesh in the scene,
* but is confined to a single plane. This technique can be used as
* a very performant alternative to classic shadow mapping. However,
* it has serious limitations like:
*
 * - Shadows can only be cast on flat planes.
* - No soft shadows support.
*
* ```js
* const cubeShadow = new ShadowMesh( cube );
* scene.add( cubeShadow );
* ```
*
* @augments Mesh
* @three_import import { ShadowMesh } from 'three/addons/objects/ShadowMesh.js';
*/
class ShadowMesh extends Mesh {

	/**
	 * Constructs a new shadow mesh.
	 *
	 * @param {Mesh} mesh - The shadow-casting reference mesh.
	 */
	constructor( mesh ) {

		// Translucent black material; the stencil setup ensures each covered
		// pixel is darkened only once even when shadow triangles overlap.
		const shadowMaterial = new MeshBasicMaterial( {

			color: 0x000000,
			transparent: true,
			opacity: 0.6,
			depthWrite: false,
			stencilWrite: true,
			stencilFunc: EqualStencilFunc,
			stencilRef: 0,
			stencilZPass: IncrementStencilOp

		} );

		// The shadow shares the caster's geometry; only the transform differs.
		super( mesh.geometry, shadowMaterial );

		/**
		 * This flag can be used for type testing.
		 *
		 * @type {boolean}
		 * @readonly
		 * @default true
		 */
		this.isShadowMesh = true;

		/**
		 * Represent the world matrix of the reference mesh.
		 * Stored by reference, so later transforms of the caster are picked up
		 * automatically by {@link ShadowMesh#update}.
		 *
		 * @type {Matrix4}
		 */
		this.meshMatrix = mesh.matrixWorld;

		/**
		 * Overwritten to disable view-frustum culling by default.
		 *
		 * @type {boolean}
		 * @default false
		 */
		this.frustumCulled = false;

		/**
		 * Overwritten to disable automatic matrix update. The local
		 * matrix is computed manually in {@link ShadowMesh#update}.
		 *
		 * @type {boolean}
		 * @default false
		 */
		this.matrixAutoUpdate = false;

	}

	/**
	 * Updates the shadow mesh so it follows its shadow-casting reference mesh.
	 *
	 * Builds the matrix that projects geometry from the light position onto the
	 * given plane and composes it with the caster's world matrix.
	 *
	 * @param {Plane} plane - The plane onto the shadow mesh is projected.
	 * @param {Vector4} lightPosition4D - The light position.
	 */
	update( plane, lightPosition4D ) {

		// based on https://www.opengl.org/archives/resources/features/StencilTalk/tsld021.htm

		// dot = plane · light (using -constant as the plane's d component).
		const dot = plane.normal.x * lightPosition4D.x +
			plane.normal.y * lightPosition4D.y +
			plane.normal.z * lightPosition4D.z +
			- plane.constant * lightPosition4D.w;

		const sme = _shadowMatrix.elements;

		// shadowMatrix = dot * I - light ⊗ plane (column-major element layout).
		sme[ 0 ] = dot - lightPosition4D.x * plane.normal.x;
		sme[ 4 ] = - lightPosition4D.x * plane.normal.y;
		sme[ 8 ] = - lightPosition4D.x * plane.normal.z;
		sme[ 12 ] = - lightPosition4D.x * - plane.constant;

		sme[ 1 ] = - lightPosition4D.y * plane.normal.x;
		sme[ 5 ] = dot - lightPosition4D.y * plane.normal.y;
		sme[ 9 ] = - lightPosition4D.y * plane.normal.z;
		sme[ 13 ] = - lightPosition4D.y * - plane.constant;

		sme[ 2 ] = - lightPosition4D.z * plane.normal.x;
		sme[ 6 ] = - lightPosition4D.z * plane.normal.y;
		sme[ 10 ] = dot - lightPosition4D.z * plane.normal.z;
		sme[ 14 ] = - lightPosition4D.z * - plane.constant;

		sme[ 3 ] = - lightPosition4D.w * plane.normal.x;
		sme[ 7 ] = - lightPosition4D.w * plane.normal.y;
		sme[ 11 ] = - lightPosition4D.w * plane.normal.z;
		sme[ 15 ] = dot - lightPosition4D.w * - plane.constant;

		// Project the caster's world transform onto the plane.
		this.matrix.multiplyMatrices( _shadowMatrix, this.meshMatrix );

	}

}

export { ShadowMesh };

237
node_modules/three/examples/jsm/objects/Sky.js generated vendored Normal file
View File

@@ -0,0 +1,237 @@
import {
BackSide,
BoxGeometry,
Mesh,
ShaderMaterial,
UniformsUtils,
Vector3
} from 'three';
/**
* Represents a skydome for scene backgrounds. Based on [A Practical Analytic Model for Daylight]{@link https://www.researchgate.net/publication/220720443_A_Practical_Analytic_Model_for_Daylight}
* aka The Preetham Model, the de facto standard for analytical skydomes.
*
* Note that this class can only be used with {@link WebGLRenderer}.
* When using {@link WebGPURenderer}, use {@link SkyMesh}.
*
* More references:
*
* - {@link http://simonwallner.at/project/atmospheric-scattering/}
* - {@link http://blenderartists.org/forum/showthread.php?245954-preethams-sky-impementation-HDR}
*
*
* ```js
* const sky = new Sky();
* sky.scale.setScalar( 10000 );
* scene.add( sky );
* ```
*
* @augments Mesh
* @three_import import { Sky } from 'three/addons/objects/Sky.js';
*/
class Sky extends Mesh {

	/**
	 * Constructs a new skydome: a unit box rendered with the shared
	 * {@link Sky.SkyShader} material.
	 */
	constructor() {

		const { name, uniforms, vertexShader, fragmentShader } = Sky.SkyShader;

		super(
			new BoxGeometry( 1, 1, 1 ),
			new ShaderMaterial( {
				name,
				uniforms: UniformsUtils.clone( uniforms ),
				vertexShader,
				fragmentShader,
				side: BackSide,
				depthWrite: false
			} )
		);

		/**
		 * This flag can be used for type testing.
		 *
		 * @type {boolean}
		 * @readonly
		 * @default true
		 */
		this.isSky = true;

	}

}
// Preetham analytic daylight model. The vertex shader precomputes per-vertex
// scattering coefficients; the fragment shader combines Rayleigh/Mie in- and
// out-scattering with a solar disc and a simple night-sky term.
Sky.SkyShader = {

	name: 'SkyShader',

	uniforms: {
		'turbidity': { value: 2 },
		'rayleigh': { value: 1 },
		'mieCoefficient': { value: 0.005 },
		'mieDirectionalG': { value: 0.8 },
		'sunPosition': { value: new Vector3() },
		'up': { value: new Vector3( 0, 1, 0 ) }
	},

	vertexShader: /* glsl */`
		uniform vec3 sunPosition;
		uniform float rayleigh;
		uniform float turbidity;
		uniform float mieCoefficient;
		uniform vec3 up;

		varying vec3 vWorldPosition;
		varying vec3 vSunDirection;
		varying float vSunfade;
		varying vec3 vBetaR;
		varying vec3 vBetaM;
		varying float vSunE;

		// constants for atmospheric scattering
		const float e = 2.71828182845904523536028747135266249775724709369995957;
		const float pi = 3.141592653589793238462643383279502884197169;

		// wavelength of used primaries, according to preetham
		const vec3 lambda = vec3( 680E-9, 550E-9, 450E-9 );
		// this pre-calculation replaces older TotalRayleigh(vec3 lambda) function:
		// (8.0 * pow(pi, 3.0) * pow(pow(n, 2.0) - 1.0, 2.0) * (6.0 + 3.0 * pn)) / (3.0 * N * pow(lambda, vec3(4.0)) * (6.0 - 7.0 * pn))
		const vec3 totalRayleigh = vec3( 5.804542996261093E-6, 1.3562911419845635E-5, 3.0265902468824876E-5 );

		// mie stuff
		// K coefficient for the primaries
		const float v = 4.0;
		const vec3 K = vec3( 0.686, 0.678, 0.666 );
		// MieConst = pi * pow( ( 2.0 * pi ) / lambda, vec3( v - 2.0 ) ) * K
		const vec3 MieConst = vec3( 1.8399918514433978E14, 2.7798023919660528E14, 4.0790479543861094E14 );

		// earth shadow hack
		// cutoffAngle = pi / 1.95;
		const float cutoffAngle = 1.6110731556870734;
		const float steepness = 1.5;
		const float EE = 1000.0;

		float sunIntensity( float zenithAngleCos ) {
			zenithAngleCos = clamp( zenithAngleCos, -1.0, 1.0 );
			return EE * max( 0.0, 1.0 - pow( e, -( ( cutoffAngle - acos( zenithAngleCos ) ) / steepness ) ) );
		}

		vec3 totalMie( float T ) {
			float c = ( 0.2 * T ) * 10E-18;
			return 0.434 * c * MieConst;
		}

		void main() {

			vec4 worldPosition = modelMatrix * vec4( position, 1.0 );
			vWorldPosition = worldPosition.xyz;

			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
			gl_Position.z = gl_Position.w; // set z to camera.far

			vSunDirection = normalize( sunPosition );

			vSunE = sunIntensity( dot( vSunDirection, up ) );

			vSunfade = 1.0 - clamp( 1.0 - exp( ( sunPosition.y / 450000.0 ) ), 0.0, 1.0 );

			float rayleighCoefficient = rayleigh - ( 1.0 * ( 1.0 - vSunfade ) );

			// extinction (absorption + out scattering)
			// rayleigh coefficients
			vBetaR = totalRayleigh * rayleighCoefficient;

			// mie coefficients
			vBetaM = totalMie( turbidity ) * mieCoefficient;

		}`,

	fragmentShader: /* glsl */`
		varying vec3 vWorldPosition;
		varying vec3 vSunDirection;
		varying float vSunfade;
		varying vec3 vBetaR;
		varying vec3 vBetaM;
		varying float vSunE;

		uniform float mieDirectionalG;
		uniform vec3 up;

		// constants for atmospheric scattering
		const float pi = 3.141592653589793238462643383279502884197169;

		const float n = 1.0003; // refractive index of air
		const float N = 2.545E25; // number of molecules per unit volume for air at 288.15K and 1013mb (sea level -45 celsius)

		// optical length at zenith for molecules
		const float rayleighZenithLength = 8.4E3;
		const float mieZenithLength = 1.25E3;
		// 66 arc seconds -> degrees, and the cosine of that
		const float sunAngularDiameterCos = 0.999956676946448443553574619906976478926848692873900859324;

		// 3.0 / ( 16.0 * pi )
		const float THREE_OVER_SIXTEENPI = 0.05968310365946075;
		// 1.0 / ( 4.0 * pi )
		const float ONE_OVER_FOURPI = 0.07957747154594767;

		float rayleighPhase( float cosTheta ) {
			return THREE_OVER_SIXTEENPI * ( 1.0 + pow( cosTheta, 2.0 ) );
		}

		float hgPhase( float cosTheta, float g ) {
			float g2 = pow( g, 2.0 );
			float inverse = 1.0 / pow( 1.0 - 2.0 * g * cosTheta + g2, 1.5 );
			return ONE_OVER_FOURPI * ( ( 1.0 - g2 ) * inverse );
		}

		void main() {

			vec3 direction = normalize( vWorldPosition - cameraPosition );

			// optical length
			// cutoff angle at 90 to avoid singularity in next formula.
			float zenithAngle = acos( max( 0.0, dot( up, direction ) ) );
			float inverse = 1.0 / ( cos( zenithAngle ) + 0.15 * pow( 93.885 - ( ( zenithAngle * 180.0 ) / pi ), -1.253 ) );
			float sR = rayleighZenithLength * inverse;
			float sM = mieZenithLength * inverse;

			// combined extinction factor
			vec3 Fex = exp( -( vBetaR * sR + vBetaM * sM ) );

			// in scattering
			float cosTheta = dot( direction, vSunDirection );

			float rPhase = rayleighPhase( cosTheta * 0.5 + 0.5 );
			vec3 betaRTheta = vBetaR * rPhase;

			float mPhase = hgPhase( cosTheta, mieDirectionalG );
			vec3 betaMTheta = vBetaM * mPhase;

			vec3 Lin = pow( vSunE * ( ( betaRTheta + betaMTheta ) / ( vBetaR + vBetaM ) ) * ( 1.0 - Fex ), vec3( 1.5 ) );
			Lin *= mix( vec3( 1.0 ), pow( vSunE * ( ( betaRTheta + betaMTheta ) / ( vBetaR + vBetaM ) ) * Fex, vec3( 1.0 / 2.0 ) ), clamp( pow( 1.0 - dot( up, vSunDirection ), 5.0 ), 0.0, 1.0 ) );

			// nightsky
			float theta = acos( direction.y ); // elevation --> y-axis, [-pi/2, pi/2]
			float phi = atan( direction.z, direction.x ); // azimuth --> x-axis [-pi/2, pi/2]
			vec2 uv = vec2( phi, theta ) / vec2( 2.0 * pi, pi ) + vec2( 0.5, 0.0 );
			vec3 L0 = vec3( 0.1 ) * Fex;

			// composition + solar disc
			float sundisk = smoothstep( sunAngularDiameterCos, sunAngularDiameterCos + 0.00002, cosTheta );
			L0 += ( vSunE * 19000.0 * Fex ) * sundisk;

			vec3 texColor = ( Lin + L0 ) * 0.04 + vec3( 0.0, 0.0003, 0.00075 );

			vec3 retColor = pow( texColor, vec3( 1.0 / ( 1.2 + ( 1.2 * vSunfade ) ) ) );

			gl_FragColor = vec4( retColor, 1.0 );

			#include <tonemapping_fragment>
			#include <colorspace_fragment>

		}`

};

export { Sky };

243
node_modules/three/examples/jsm/objects/SkyMesh.js generated vendored Normal file
View File

@@ -0,0 +1,243 @@
import {
BackSide,
BoxGeometry,
Mesh,
Vector3,
NodeMaterial
} from 'three/webgpu';
import { Fn, float, vec3, acos, add, mul, clamp, cos, dot, exp, max, mix, modelViewProjection, normalize, positionWorld, pow, smoothstep, sub, varyingProperty, vec4, uniform, cameraPosition } from 'three/tsl';
/**
* Represents a skydome for scene backgrounds. Based on [A Practical Analytic Model for Daylight]{@link https://www.researchgate.net/publication/220720443_A_Practical_Analytic_Model_for_Daylight}
* aka The Preetham Model, the de facto standard for analytical skydomes.
*
 * Note that this class can only be used with {@link WebGPURenderer}.
 * When using {@link WebGLRenderer}, use {@link Sky}.
*
* More references:
*
* - {@link http://simonwallner.at/project/atmospheric-scattering/}
* - {@link http://blenderartists.org/forum/showthread.php?245954-preethams-sky-impementation-HDR}
*
* ```js
* const sky = new SkyMesh();
* sky.scale.setScalar( 10000 );
* scene.add( sky );
* ```
*
* @augments Mesh
* @three_import import { SkyMesh } from 'three/addons/objects/SkyMesh.js';
*/
class SkyMesh extends Mesh {
/**
* Constructs a new skydome. All shading parameters are exposed as node
* uniforms so they can be changed at runtime without rebuilding the material.
*/
constructor() {
// The material must exist before super() since the mesh constructor takes it.
const material = new NodeMaterial();
super( new BoxGeometry( 1, 1, 1 ), material );
/**
* The turbidity uniform.
*
* @type {UniformNode<float>}
*/
this.turbidity = uniform( 2 );
/**
* The rayleigh uniform.
*
* @type {UniformNode<float>}
*/
this.rayleigh = uniform( 1 );
/**
* The mieCoefficient uniform.
*
* @type {UniformNode<float>}
*/
this.mieCoefficient = uniform( 0.005 );
/**
* The mieDirectionalG uniform.
*
* @type {UniformNode<float>}
*/
this.mieDirectionalG = uniform( 0.8 );
/**
* The sun position uniform.
*
* @type {UniformNode<vec3>}
*/
this.sunPosition = uniform( new Vector3() );
/**
* The up position.
*
* @type {UniformNode<vec3>}
*/
this.upUniform = uniform( new Vector3( 0, 1, 0 ) );
/**
* This flag can be used for type testing.
*
* @type {boolean}
* @readonly
* @default true
*/
this.isSky = true;
// Varyings: computed once per vertex in vertexNode, consumed in colorNode.
const vSunDirection = varyingProperty( 'vec3' );
const vSunE = varyingProperty( 'float' );
const vSunfade = varyingProperty( 'float' );
const vBetaR = varyingProperty( 'vec3' );
const vBetaM = varyingProperty( 'vec3' );
// Per-vertex stage: derives sun intensity and the Rayleigh/Mie scattering
// coefficients from the uniforms and writes them into the varyings above.
const vertexNode = /*@__PURE__*/ Fn( () => {
// constants for atmospheric scattering
const e = float( 2.71828182845904523536028747135266249775724709369995957 );
// const pi = float( 3.141592653589793238462643383279502884197169 );
// wavelength of used primaries, according to preetham
// const lambda = vec3( 680E-9, 550E-9, 450E-9 );
// this pre-calculation replaces older TotalRayleigh(vec3 lambda) function:
// (8.0 * pow(pi, 3.0) * pow(pow(n, 2.0) - 1.0, 2.0) * (6.0 + 3.0 * pn)) / (3.0 * N * pow(lambda, vec3(4.0)) * (6.0 - 7.0 * pn))
const totalRayleigh = vec3( 5.804542996261093E-6, 1.3562911419845635E-5, 3.0265902468824876E-5 );
// mie stuff
// K coefficient for the primaries
// const v = float( 4.0 );
// const K = vec3( 0.686, 0.678, 0.666 );
// MieConst = pi * pow( ( 2.0 * pi ) / lambda, vec3( v - 2.0 ) ) * K
const MieConst = vec3( 1.8399918514433978E14, 2.7798023919660528E14, 4.0790479543861094E14 );
// earth shadow hack
// cutoffAngle = pi / 1.95;
const cutoffAngle = float( 1.6110731556870734 );
const steepness = float( 1.5 );
const EE = float( 1000.0 );
// varying sun position
const sunDirection = normalize( this.sunPosition );
vSunDirection.assign( sunDirection );
// varying sun intensity
const angle = dot( sunDirection, this.upUniform );
const zenithAngleCos = clamp( angle, - 1, 1 );
const sunIntensity = EE.mul( max( 0.0, float( 1.0 ).sub( pow( e, cutoffAngle.sub( acos( zenithAngleCos ) ).div( steepness ).negate() ) ) ) );
vSunE.assign( sunIntensity );
// varying sun fade
const sunfade = float( 1.0 ).sub( clamp( float( 1.0 ).sub( exp( this.sunPosition.y.div( 450000.0 ) ) ), 0, 1 ) );
vSunfade.assign( sunfade );
// varying vBetaR
const rayleighCoefficient = this.rayleigh.sub( float( 1.0 ).mul( float( 1.0 ).sub( sunfade ) ) );
// extinction (absorption + out scattering)
// rayleigh coefficients
vBetaR.assign( totalRayleigh.mul( rayleighCoefficient ) );
// varying vBetaM
const c = float( 0.2 ).mul( this.turbidity ).mul( 10E-18 );
const totalMie = float( 0.434 ).mul( c ).mul( MieConst );
vBetaM.assign( totalMie.mul( this.mieCoefficient ) );
// position
const position = modelViewProjection;
position.z.assign( position.w ); // set z to camera.far
return position;
} )();
// Per-fragment stage: evaluates the Preetham sky model using the varyings
// computed above and returns the final sky color (incl. solar disc).
const colorNode = /*@__PURE__*/ Fn( () => {
// constants for atmospheric scattering
const pi = float( 3.141592653589793238462643383279502884197169 );
// optical length at zenith for molecules
const rayleighZenithLength = float( 8.4E3 );
const mieZenithLength = float( 1.25E3 );
// 66 arc seconds -> degrees, and the cosine of that
const sunAngularDiameterCos = float( 0.999956676946448443553574619906976478926848692873900859324 );
// 3.0 / ( 16.0 * pi )
const THREE_OVER_SIXTEENPI = float( 0.05968310365946075 );
// 1.0 / ( 4.0 * pi )
const ONE_OVER_FOURPI = float( 0.07957747154594767 );
//
const direction = normalize( positionWorld.sub( cameraPosition ) );
// optical length
// cutoff angle at 90 to avoid singularity in next formula.
const zenithAngle = acos( max( 0.0, dot( this.upUniform, direction ) ) );
const inverse = float( 1.0 ).div( cos( zenithAngle ).add( float( 0.15 ).mul( pow( float( 93.885 ).sub( zenithAngle.mul( 180.0 ).div( pi ) ), - 1.253 ) ) ) );
const sR = rayleighZenithLength.mul( inverse );
const sM = mieZenithLength.mul( inverse );
// combined extinction factor
const Fex = exp( mul( vBetaR, sR ).add( mul( vBetaM, sM ) ).negate() );
// in scattering
const cosTheta = dot( direction, vSunDirection );
// betaRTheta
const c = cosTheta.mul( 0.5 ).add( 0.5 );
const rPhase = THREE_OVER_SIXTEENPI.mul( float( 1.0 ).add( pow( c, 2.0 ) ) );
const betaRTheta = vBetaR.mul( rPhase );
// betaMTheta
const g2 = pow( this.mieDirectionalG, 2.0 );
const inv = float( 1.0 ).div( pow( float( 1.0 ).sub( float( 2.0 ).mul( this.mieDirectionalG ).mul( cosTheta ) ).add( g2 ), 1.5 ) );
const mPhase = ONE_OVER_FOURPI.mul( float( 1.0 ).sub( g2 ) ).mul( inv );
const betaMTheta = vBetaM.mul( mPhase );
const Lin = pow( vSunE.mul( add( betaRTheta, betaMTheta ).div( add( vBetaR, vBetaM ) ) ).mul( sub( 1.0, Fex ) ), vec3( 1.5 ) );
Lin.mulAssign( mix( vec3( 1.0 ), pow( vSunE.mul( add( betaRTheta, betaMTheta ).div( add( vBetaR, vBetaM ) ) ).mul( Fex ), vec3( 1.0 / 2.0 ) ), clamp( pow( sub( 1.0, dot( this.upUniform, vSunDirection ) ), 5.0 ), 0.0, 1.0 ) ) );
// nightsky
const L0 = vec3( 0.1 ).mul( Fex );
// composition + solar disc
const sundisk = smoothstep( sunAngularDiameterCos, sunAngularDiameterCos.add( 0.00002 ), cosTheta );
L0.addAssign( vSunE.mul( 19000.0 ).mul( Fex ).mul( sundisk ) );
const texColor = add( Lin, L0 ).mul( 0.04 ).add( vec3( 0.0, 0.0003, 0.00075 ) );
const retColor = pow( texColor, vec3( float( 1.0 ).div( float( 1.2 ).add( vSunfade.mul( 1.2 ) ) ) ) );
return vec4( retColor, 1.0 );
} )();
// The dome is viewed from inside the unit box, so render back faces and
// never write depth (the sky must not occlude scene geometry).
material.side = BackSide;
material.depthWrite = false;
material.vertexNode = vertexNode;
material.colorNode = colorNode;
}
}
export { SkyMesh };

373
node_modules/three/examples/jsm/objects/Water.js generated vendored Normal file
View File

@@ -0,0 +1,373 @@
import {
Color,
FrontSide,
Matrix4,
Mesh,
PerspectiveCamera,
Plane,
ShaderMaterial,
UniformsLib,
UniformsUtils,
Vector3,
Vector4,
WebGLRenderTarget
} from 'three';
/**
* A basic flat, reflective water effect.
*
* Note that this class can only be used with {@link WebGLRenderer}.
* When using {@link WebGPURenderer}, use {@link WaterMesh}.
*
* References:
*
* - [Flat mirror for three.js]{@link https://github.com/Slayvin}
* - [An implementation of water shader based on the flat mirror]{@link https://home.adelphi.edu/~stemkoski/}
* - [Water shader explanations in WebGL]{@link http://29a.ch/slides/2012/webglwater/}
*
* @augments Mesh
* @three_import import { Water } from 'three/addons/objects/Water.js';
*/
class Water extends Mesh {
/**
* Constructs a new water instance.
*
* @param {BufferGeometry} geometry - The water's geometry.
* @param {Water~Options} [options] - The configuration options.
*/
constructor( geometry, options = {} ) {
super( geometry );
/**
* This flag can be used for type testing.
*
* @type {boolean}
* @readonly
* @default true
*/
this.isWater = true;
// Captured for use inside the onBeforeRender closure below.
const scope = this;
// Resolve options, falling back to defaults where not provided.
const textureWidth = options.textureWidth !== undefined ? options.textureWidth : 512;
const textureHeight = options.textureHeight !== undefined ? options.textureHeight : 512;
const clipBias = options.clipBias !== undefined ? options.clipBias : 0.0;
const alpha = options.alpha !== undefined ? options.alpha : 1.0;
const time = options.time !== undefined ? options.time : 0.0;
const normalSampler = options.waterNormals !== undefined ? options.waterNormals : null;
const sunDirection = options.sunDirection !== undefined ? options.sunDirection : new Vector3( 0.70707, 0.70707, 0.0 );
const sunColor = new Color( options.sunColor !== undefined ? options.sunColor : 0xffffff );
const waterColor = new Color( options.waterColor !== undefined ? options.waterColor : 0x7F7F7F );
const eye = options.eye !== undefined ? options.eye : new Vector3( 0, 0, 0 );
const distortionScale = options.distortionScale !== undefined ? options.distortionScale : 20.0;
const side = options.side !== undefined ? options.side : FrontSide;
const fog = options.fog !== undefined ? options.fog : false;
//
// Scratch objects reused every frame by onBeforeRender to avoid allocations.
const mirrorPlane = new Plane();
const normal = new Vector3();
const mirrorWorldPosition = new Vector3();
const cameraWorldPosition = new Vector3();
const rotationMatrix = new Matrix4();
const lookAtPosition = new Vector3( 0, 0, - 1 );
const clipPlane = new Vector4();
const view = new Vector3();
const target = new Vector3();
const q = new Vector4();
const textureMatrix = new Matrix4();
// Virtual camera used to render the mirrored scene into the render target.
const mirrorCamera = new PerspectiveCamera();
const renderTarget = new WebGLRenderTarget( textureWidth, textureHeight );
const mirrorShader = {
name: 'MirrorShader',
uniforms: UniformsUtils.merge( [
UniformsLib[ 'fog' ],
UniformsLib[ 'lights' ],
{
'normalSampler': { value: null },
'mirrorSampler': { value: null },
'alpha': { value: 1.0 },
'time': { value: 0.0 },
'size': { value: 1.0 },
'distortionScale': { value: 20.0 },
'textureMatrix': { value: new Matrix4() },
'sunColor': { value: new Color( 0x7F7F7F ) },
'sunDirection': { value: new Vector3( 0.70707, 0.70707, 0 ) },
'eye': { value: new Vector3() },
'waterColor': { value: new Color( 0x555555 ) }
}
] ),
vertexShader: /* glsl */`
uniform mat4 textureMatrix;
uniform float time;
varying vec4 mirrorCoord;
varying vec4 worldPosition;
#include <common>
#include <fog_pars_vertex>
#include <shadowmap_pars_vertex>
#include <logdepthbuf_pars_vertex>
void main() {
mirrorCoord = modelMatrix * vec4( position, 1.0 );
worldPosition = mirrorCoord.xyzw;
mirrorCoord = textureMatrix * mirrorCoord;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_Position = projectionMatrix * mvPosition;
#include <beginnormal_vertex>
#include <defaultnormal_vertex>
#include <logdepthbuf_vertex>
#include <fog_vertex>
#include <shadowmap_vertex>
}`,
fragmentShader: /* glsl */`
uniform sampler2D mirrorSampler;
uniform float alpha;
uniform float time;
uniform float size;
uniform float distortionScale;
uniform sampler2D normalSampler;
uniform vec3 sunColor;
uniform vec3 sunDirection;
uniform vec3 eye;
uniform vec3 waterColor;
varying vec4 mirrorCoord;
varying vec4 worldPosition;
vec4 getNoise( vec2 uv ) {
vec2 uv0 = ( uv / 103.0 ) + vec2(time / 17.0, time / 29.0);
vec2 uv1 = uv / 107.0-vec2( time / -19.0, time / 31.0 );
vec2 uv2 = uv / vec2( 8907.0, 9803.0 ) + vec2( time / 101.0, time / 97.0 );
vec2 uv3 = uv / vec2( 1091.0, 1027.0 ) - vec2( time / 109.0, time / -113.0 );
vec4 noise = texture2D( normalSampler, uv0 ) +
texture2D( normalSampler, uv1 ) +
texture2D( normalSampler, uv2 ) +
texture2D( normalSampler, uv3 );
return noise * 0.5 - 1.0;
}
void sunLight( const vec3 surfaceNormal, const vec3 eyeDirection, float shiny, float spec, float diffuse, inout vec3 diffuseColor, inout vec3 specularColor ) {
vec3 reflection = normalize( reflect( -sunDirection, surfaceNormal ) );
float direction = max( 0.0, dot( eyeDirection, reflection ) );
specularColor += pow( direction, shiny ) * sunColor * spec;
diffuseColor += max( dot( sunDirection, surfaceNormal ), 0.0 ) * sunColor * diffuse;
}
#include <common>
#include <packing>
#include <bsdfs>
#include <fog_pars_fragment>
#include <logdepthbuf_pars_fragment>
#include <lights_pars_begin>
#include <shadowmap_pars_fragment>
#include <shadowmask_pars_fragment>
void main() {
#include <logdepthbuf_fragment>
vec4 noise = getNoise( worldPosition.xz * size );
vec3 surfaceNormal = normalize( noise.xzy * vec3( 1.5, 1.0, 1.5 ) );
vec3 diffuseLight = vec3(0.0);
vec3 specularLight = vec3(0.0);
vec3 worldToEye = eye-worldPosition.xyz;
vec3 eyeDirection = normalize( worldToEye );
sunLight( surfaceNormal, eyeDirection, 100.0, 2.0, 0.5, diffuseLight, specularLight );
float distance = length(worldToEye);
vec2 distortion = surfaceNormal.xz * ( 0.001 + 1.0 / distance ) * distortionScale;
vec3 reflectionSample = vec3( texture2D( mirrorSampler, mirrorCoord.xy / mirrorCoord.w + distortion ) );
float theta = max( dot( eyeDirection, surfaceNormal ), 0.0 );
float rf0 = 0.3;
float reflectance = rf0 + ( 1.0 - rf0 ) * pow( ( 1.0 - theta ), 5.0 );
vec3 scatter = max( 0.0, dot( surfaceNormal, eyeDirection ) ) * waterColor;
vec3 albedo = mix( ( sunColor * diffuseLight * 0.3 + scatter ) * getShadowMask(), ( vec3( 0.1 ) + reflectionSample * 0.9 + reflectionSample * specularLight ), reflectance);
vec3 outgoingLight = albedo;
gl_FragColor = vec4( outgoingLight, alpha );
#include <tonemapping_fragment>
#include <colorspace_fragment>
#include <fog_fragment>
}`
};
const material = new ShaderMaterial( {
name: mirrorShader.name,
uniforms: UniformsUtils.clone( mirrorShader.uniforms ),
vertexShader: mirrorShader.vertexShader,
fragmentShader: mirrorShader.fragmentShader,
lights: true,
side: side,
fog: fog
} );
// Wire the resolved options into the cloned uniform set.
material.uniforms[ 'mirrorSampler' ].value = renderTarget.texture;
material.uniforms[ 'textureMatrix' ].value = textureMatrix;
material.uniforms[ 'alpha' ].value = alpha;
material.uniforms[ 'time' ].value = time;
material.uniforms[ 'normalSampler' ].value = normalSampler;
material.uniforms[ 'sunColor' ].value = sunColor;
material.uniforms[ 'waterColor' ].value = waterColor;
material.uniforms[ 'sunDirection' ].value = sunDirection;
material.uniforms[ 'distortionScale' ].value = distortionScale;
material.uniforms[ 'eye' ].value = eye;
scope.material = material;
// Renders the reflection into `renderTarget` each frame, before the water
// itself is drawn: mirrors the camera across the water plane, applies an
// oblique near-plane clip, and renders the scene from the mirrored view.
scope.onBeforeRender = function ( renderer, scene, camera ) {
mirrorWorldPosition.setFromMatrixPosition( scope.matrixWorld );
cameraWorldPosition.setFromMatrixPosition( camera.matrixWorld );
rotationMatrix.extractRotation( scope.matrixWorld );
normal.set( 0, 0, 1 );
normal.applyMatrix4( rotationMatrix );
view.subVectors( mirrorWorldPosition, cameraWorldPosition );
// Avoid rendering when mirror is facing away
if ( view.dot( normal ) > 0 ) return;
// Mirror the camera position across the water plane.
view.reflect( normal ).negate();
view.add( mirrorWorldPosition );
rotationMatrix.extractRotation( camera.matrixWorld );
lookAtPosition.set( 0, 0, - 1 );
lookAtPosition.applyMatrix4( rotationMatrix );
lookAtPosition.add( cameraWorldPosition );
target.subVectors( mirrorWorldPosition, lookAtPosition );
target.reflect( normal ).negate();
target.add( mirrorWorldPosition );
mirrorCamera.position.copy( view );
mirrorCamera.up.set( 0, 1, 0 );
mirrorCamera.up.applyMatrix4( rotationMatrix );
mirrorCamera.up.reflect( normal );
mirrorCamera.lookAt( target );
mirrorCamera.far = camera.far; // Used in WebGLBackground
mirrorCamera.updateMatrixWorld();
mirrorCamera.projectionMatrix.copy( camera.projectionMatrix );
// Update the texture matrix
// (bias matrix mapping clip space [-1,1] to texture space [0,1])
textureMatrix.set(
0.5, 0.0, 0.0, 0.5,
0.0, 0.5, 0.0, 0.5,
0.0, 0.0, 0.5, 0.5,
0.0, 0.0, 0.0, 1.0
);
textureMatrix.multiply( mirrorCamera.projectionMatrix );
textureMatrix.multiply( mirrorCamera.matrixWorldInverse );
// Now update projection matrix with new clip plane, implementing code from: http://www.terathon.com/code/oblique.html
// Paper explaining this technique: http://www.terathon.com/lengyel/Lengyel-Oblique.pdf
mirrorPlane.setFromNormalAndCoplanarPoint( normal, mirrorWorldPosition );
mirrorPlane.applyMatrix4( mirrorCamera.matrixWorldInverse );
clipPlane.set( mirrorPlane.normal.x, mirrorPlane.normal.y, mirrorPlane.normal.z, mirrorPlane.constant );
const projectionMatrix = mirrorCamera.projectionMatrix;
q.x = ( Math.sign( clipPlane.x ) + projectionMatrix.elements[ 8 ] ) / projectionMatrix.elements[ 0 ];
q.y = ( Math.sign( clipPlane.y ) + projectionMatrix.elements[ 9 ] ) / projectionMatrix.elements[ 5 ];
q.z = - 1.0;
q.w = ( 1.0 + projectionMatrix.elements[ 10 ] ) / projectionMatrix.elements[ 14 ];
// Calculate the scaled plane vector
clipPlane.multiplyScalar( 2.0 / clipPlane.dot( q ) );
// Replacing the third row of the projection matrix
projectionMatrix.elements[ 2 ] = clipPlane.x;
projectionMatrix.elements[ 6 ] = clipPlane.y;
projectionMatrix.elements[ 10 ] = clipPlane.z + 1.0 - clipBias;
projectionMatrix.elements[ 14 ] = clipPlane.w;
eye.setFromMatrixPosition( camera.matrixWorld );
// Render
// Save renderer state, render the mirrored scene, then restore.
const currentRenderTarget = renderer.getRenderTarget();
const currentXrEnabled = renderer.xr.enabled;
const currentShadowAutoUpdate = renderer.shadowMap.autoUpdate;
scope.visible = false;
renderer.xr.enabled = false; // Avoid camera modification and recursion
renderer.shadowMap.autoUpdate = false; // Avoid re-computing shadows
renderer.setRenderTarget( renderTarget );
renderer.state.buffers.depth.setMask( true ); // make sure the depth buffer is writable so it can be properly cleared, see #18897
if ( renderer.autoClear === false ) renderer.clear();
renderer.render( scene, mirrorCamera );
scope.visible = true;
renderer.xr.enabled = currentXrEnabled;
renderer.shadowMap.autoUpdate = currentShadowAutoUpdate;
renderer.setRenderTarget( currentRenderTarget );
// Restore viewport
const viewport = camera.viewport;
if ( viewport !== undefined ) {
renderer.state.viewport( viewport );
}
};
}
}
/**
* Constructor options of `Water`.
*
* @typedef {Object} Water~Options
* @property {number} [textureWidth=512] - The texture width. A higher value results in more clear reflections but is also more expensive.
* @property {number} [textureHeight=512] - The texture height. A higher value results in more clear reflections but is also more expensive.
* @property {number} [clipBias=0] - The clip bias.
* @property {number} [alpha=1] - The alpha value.
* @property {number} [time=0] - The time value.
* @property {?Texture} [waterNormals=null] - The water's normal map.
* @property {Vector3} [sunDirection=(0.70707,0.70707,0.0)] - The sun direction.
* @property {number|Color|string} [sunColor=0xffffff] - The sun color.
* @property {number|Color|string} [waterColor=0x7F7F7F] - The water color.
* @property {Vector3} [eye] - The eye vector.
* @property {number} [distortionScale=20] - The distortion scale.
* @property {(FrontSide|BackSide|DoubleSide)} [side=FrontSide] - The water material's `side` property.
* @property {boolean} [fog=false] - Whether the water should be affected by fog or not.
**/
export { Water };

401
node_modules/three/examples/jsm/objects/Water2.js generated vendored Normal file
View File

@@ -0,0 +1,401 @@
import {
Clock,
Color,
Matrix4,
Mesh,
RepeatWrapping,
ShaderMaterial,
TextureLoader,
UniformsLib,
UniformsUtils,
Vector2,
Vector4
} from 'three';
import { Reflector } from '../objects/Reflector.js';
import { Refractor } from '../objects/Refractor.js';
/** @module Water2 */
/**
* An advanced water effect that supports reflections, refractions and flow maps.
*
* Note that this class can only be used with {@link WebGLRenderer}.
* When using {@link WebGPURenderer}, use {@link module:Water2Mesh}.
*
* References:
*
* - {@link https://alex.vlachos.com/graphics/Vlachos-SIGGRAPH10-WaterFlow.pdf}
* - {@link http://graphicsrunner.blogspot.de/2010/08/water-using-flow-maps.html}
*
* @augments Mesh
* @three_import import { Water } from 'three/addons/objects/Water2.js';
*/
class Water extends Mesh {
/**
* Constructs a new water instance.
*
* @param {BufferGeometry} geometry - The water's geometry.
* @param {module:Water2~Options} [options] - The configuration options.
*/
constructor( geometry, options = {} ) {
super( geometry );
/**
* This flag can be used for type testing.
*
* @type {boolean}
* @readonly
* @default true
*/
this.isWater = true;
this.type = 'Water';
// Captured for use inside the closures below.
const scope = this;
// Resolve options, falling back to defaults where not provided.
const color = ( options.color !== undefined ) ? new Color( options.color ) : new Color( 0xFFFFFF );
const textureWidth = options.textureWidth !== undefined ? options.textureWidth : 512;
const textureHeight = options.textureHeight !== undefined ? options.textureHeight : 512;
const clipBias = options.clipBias !== undefined ? options.clipBias : 0;
const flowDirection = options.flowDirection !== undefined ? options.flowDirection : new Vector2( 1, 0 );
const flowSpeed = options.flowSpeed !== undefined ? options.flowSpeed : 0.03;
const reflectivity = options.reflectivity !== undefined ? options.reflectivity : 0.02;
const scale = options.scale !== undefined ? options.scale : 1;
const shader = options.shader !== undefined ? options.shader : Water.WaterShader;
const textureLoader = new TextureLoader();
const flowMap = options.flowMap || undefined;
// NOTE(review): default normal maps are loaded from a relative path —
// confirm the app serves 'textures/water/...' or pass normalMap0/1 explicitly.
const normalMap0 = options.normalMap0 || textureLoader.load( 'textures/water/Water_1_M_Normal.jpg' );
const normalMap1 = options.normalMap1 || textureLoader.load( 'textures/water/Water_2_M_Normal.jpg' );
const cycle = 0.15; // a cycle of a flow map phase
const halfCycle = cycle * 0.5;
const textureMatrix = new Matrix4();
const clock = new Clock();
// internal components
if ( Reflector === undefined ) {
console.error( 'THREE.Water: Required component Reflector not found.' );
return;
}
if ( Refractor === undefined ) {
console.error( 'THREE.Water: Required component Refractor not found.' );
return;
}
// Reflection and refraction passes share the water's own geometry.
const reflector = new Reflector( geometry, {
textureWidth: textureWidth,
textureHeight: textureHeight,
clipBias: clipBias
} );
const refractor = new Refractor( geometry, {
textureWidth: textureWidth,
textureHeight: textureHeight,
clipBias: clipBias
} );
// Their transforms are copied from the water mesh each frame (see onBeforeRender).
reflector.matrixAutoUpdate = false;
refractor.matrixAutoUpdate = false;
// material
this.material = new ShaderMaterial( {
name: shader.name,
uniforms: UniformsUtils.merge( [
UniformsLib[ 'fog' ],
shader.uniforms
] ),
vertexShader: shader.vertexShader,
fragmentShader: shader.fragmentShader,
transparent: true,
fog: true
} );
// With a flow map the flow direction is read per-texel; otherwise it's a uniform.
if ( flowMap !== undefined ) {
this.material.defines.USE_FLOWMAP = '';
this.material.uniforms[ 'tFlowMap' ] = {
type: 't',
value: flowMap
};
} else {
this.material.uniforms[ 'flowDirection' ] = {
type: 'v2',
value: flowDirection
};
}
// maps
normalMap0.wrapS = normalMap0.wrapT = RepeatWrapping;
normalMap1.wrapS = normalMap1.wrapT = RepeatWrapping;
this.material.uniforms[ 'tReflectionMap' ].value = reflector.getRenderTarget().texture;
this.material.uniforms[ 'tRefractionMap' ].value = refractor.getRenderTarget().texture;
this.material.uniforms[ 'tNormalMap0' ].value = normalMap0;
this.material.uniforms[ 'tNormalMap1' ].value = normalMap1;
// water
this.material.uniforms[ 'color' ].value = color;
this.material.uniforms[ 'reflectivity' ].value = reflectivity;
this.material.uniforms[ 'textureMatrix' ].value = textureMatrix;
// initial values
this.material.uniforms[ 'config' ].value.x = 0; // flowMapOffset0
this.material.uniforms[ 'config' ].value.y = halfCycle; // flowMapOffset1
this.material.uniforms[ 'config' ].value.z = halfCycle; // halfCycle
this.material.uniforms[ 'config' ].value.w = scale; // scale
// functions
// Builds the matrix that maps world space to [0,1] texture space of the
// reflection/refraction render targets (bias * projection * view * model).
function updateTextureMatrix( camera ) {
textureMatrix.set(
0.5, 0.0, 0.0, 0.5,
0.0, 0.5, 0.0, 0.5,
0.0, 0.0, 0.5, 0.5,
0.0, 0.0, 0.0, 1.0
);
textureMatrix.multiply( camera.projectionMatrix );
textureMatrix.multiply( camera.matrixWorldInverse );
textureMatrix.multiply( scope.matrixWorld );
}
// Advances the two flow-map phase offsets by the elapsed frame time.
function updateFlow() {
const delta = clock.getDelta();
const config = scope.material.uniforms[ 'config' ];
config.value.x += flowSpeed * delta; // flowMapOffset0
config.value.y = config.value.x + halfCycle; // flowMapOffset1
// Important: The distance between offsets should be always the value of "halfCycle".
// Moreover, both offsets should be in the range of [ 0, cycle ].
// This approach ensures a smooth water flow and avoids "reset" effects.
if ( config.value.x >= cycle ) {
config.value.x = 0;
config.value.y = halfCycle;
} else if ( config.value.y >= cycle ) {
config.value.y = config.value.y - cycle;
}
}
//
// Renders the reflection and refraction into their render targets before
// the water itself is drawn (water hidden to avoid self-rendering).
this.onBeforeRender = function ( renderer, scene, camera ) {
updateTextureMatrix( camera );
updateFlow();
scope.visible = false;
reflector.matrixWorld.copy( scope.matrixWorld );
refractor.matrixWorld.copy( scope.matrixWorld );
reflector.onBeforeRender( renderer, scene, camera );
refractor.onBeforeRender( renderer, scene, camera );
scope.visible = true;
};
}
}
// Default shader used by `Water` when no custom `options.shader` is supplied.
// The `config` vec4 packs ( flowMapOffset0, flowMapOffset1, halfCycle, scale ).
Water.WaterShader = {
name: 'WaterShader',
uniforms: {
'color': {
type: 'c',
value: null
},
'reflectivity': {
type: 'f',
value: 0
},
'tReflectionMap': {
type: 't',
value: null
},
'tRefractionMap': {
type: 't',
value: null
},
'tNormalMap0': {
type: 't',
value: null
},
'tNormalMap1': {
type: 't',
value: null
},
'textureMatrix': {
type: 'm4',
value: null
},
'config': {
type: 'v4',
value: new Vector4()
}
},
vertexShader: /* glsl */`
#include <common>
#include <fog_pars_vertex>
#include <logdepthbuf_pars_vertex>
uniform mat4 textureMatrix;
varying vec4 vCoord;
varying vec2 vUv;
varying vec3 vToEye;
void main() {
vUv = uv;
vCoord = textureMatrix * vec4( position, 1.0 );
vec4 worldPosition = modelMatrix * vec4( position, 1.0 );
vToEye = cameraPosition - worldPosition.xyz;
vec4 mvPosition = viewMatrix * worldPosition; // used in fog_vertex
gl_Position = projectionMatrix * mvPosition;
#include <logdepthbuf_vertex>
#include <fog_vertex>
}`,
fragmentShader: /* glsl */`
#include <common>
#include <fog_pars_fragment>
#include <logdepthbuf_pars_fragment>
uniform sampler2D tReflectionMap;
uniform sampler2D tRefractionMap;
uniform sampler2D tNormalMap0;
uniform sampler2D tNormalMap1;
#ifdef USE_FLOWMAP
uniform sampler2D tFlowMap;
#else
uniform vec2 flowDirection;
#endif
uniform vec3 color;
uniform float reflectivity;
uniform vec4 config;
varying vec4 vCoord;
varying vec2 vUv;
varying vec3 vToEye;
void main() {
#include <logdepthbuf_fragment>
float flowMapOffset0 = config.x;
float flowMapOffset1 = config.y;
float halfCycle = config.z;
float scale = config.w;
vec3 toEye = normalize( vToEye );
// determine flow direction
vec2 flow;
#ifdef USE_FLOWMAP
flow = texture2D( tFlowMap, vUv ).rg * 2.0 - 1.0;
#else
flow = flowDirection;
#endif
flow.x *= - 1.0;
// sample normal maps (distort uvs with flowdata)
vec4 normalColor0 = texture2D( tNormalMap0, ( vUv * scale ) + flow * flowMapOffset0 );
vec4 normalColor1 = texture2D( tNormalMap1, ( vUv * scale ) + flow * flowMapOffset1 );
// linear interpolate to get the final normal color
float flowLerp = abs( halfCycle - flowMapOffset0 ) / halfCycle;
vec4 normalColor = mix( normalColor0, normalColor1, flowLerp );
// calculate normal vector
vec3 normal = normalize( vec3( normalColor.r * 2.0 - 1.0, normalColor.b, normalColor.g * 2.0 - 1.0 ) );
// calculate the fresnel term to blend reflection and refraction maps
float theta = max( dot( toEye, normal ), 0.0 );
float reflectance = reflectivity + ( 1.0 - reflectivity ) * pow( ( 1.0 - theta ), 5.0 );
// calculate final uv coords
vec3 coord = vCoord.xyz / vCoord.w;
vec2 uv = coord.xy + coord.z * normal.xz * 0.05;
vec4 reflectColor = texture2D( tReflectionMap, vec2( 1.0 - uv.x, uv.y ) );
vec4 refractColor = texture2D( tRefractionMap, uv );
// multiply water color with the mix of both textures
gl_FragColor = vec4( color, 1.0 ) * mix( refractColor, reflectColor, reflectance );
#include <tonemapping_fragment>
#include <colorspace_fragment>
#include <fog_fragment>
}`
};
/**
* Constructor options of `Water`.
*
* @typedef {Object} module:Water2~Options
* @property {number|Color|string} [color=0xFFFFFF] - The water color.
* @property {number} [textureWidth=512] - The texture width. A higher value results in better quality but is also more expensive.
* @property {number} [textureHeight=512] - The texture height. A higher value results in better quality but is also more expensive.
* @property {number} [clipBias=0] - The clip bias.
* @property {Vector2} [flowDirection=(1,0)] - The water's flow direction.
* @property {number} [flowSpeed=0.03] - The water's flow speed.
* @property {number} [reflectivity=0.02] - The water's reflectivity.
* @property {number} [scale=1] - The water's scale.
* @property {Object} [shader] - A custom water shader.
* @property {?Texture} [flowMap=null] - The flow map. If no flow map is assigned, the water flow is defined by `flowDirection`.
* @property {?Texture} [normalMap0] - The first water normal map.
* @property {?Texture} [normalMap1] - The second water normal map.
**/
export { Water };

199
node_modules/three/examples/jsm/objects/Water2Mesh.js generated vendored Normal file
View File

@@ -0,0 +1,199 @@
import {
Color,
Mesh,
Vector2,
Vector3,
NodeMaterial,
NodeUpdateType,
TempNode
} from 'three/webgpu';
import { Fn, vec2, viewportSafeUV, viewportSharedTexture, reflector, pow, float, abs, texture, uniform, vec4, cameraPosition, positionWorld, uv, mix, vec3, normalize, max, dot, screenUV } from 'three/tsl';
/** @module Water2Mesh */
/**
* An advanced water effect that supports reflections, refractions and flow maps.
*
* Note that this class can only be used with {@link WebGPURenderer}.
* When using {@link WebGLRenderer}, use {@link module:Water2}.
*
* References:
*
* - {@link https://alex.vlachos.com/graphics/Vlachos-SIGGRAPH10-WaterFlow.pdf}
* - {@link http://graphicsrunner.blogspot.de/2010/08/water-using-flow-maps.html}
*
* @augments Mesh
* @three_import import { WaterMesh } from 'three/addons/objects/Water2Mesh.js';
*/
class WaterMesh extends Mesh {

	/**
	 * Constructs a new water mesh.
	 *
	 * @param {BufferGeometry} geometry - The water's geometry.
	 * @param {module:Water2Mesh~Options} [options] - The configuration options.
	 */
	constructor( geometry, options = {} ) {

		// The material has to be prepared before super() since the mesh
		// constructor consumes it and `this` is unavailable until afterwards.
		const waterMaterial = new NodeMaterial();
		waterMaterial.transparent = true;

		super( geometry, waterMaterial );

		/**
		 * This flag can be used for type testing.
		 *
		 * @type {boolean}
		 * @readonly
		 * @default true
		 */
		this.isWater = true;

		// The whole water shading is expressed as a single TSL color node.
		waterMaterial.colorNode = new WaterNode( options, this );

	}

}
// TSL node implementing the flow-mapped water color (reflection + refraction
// blended by a Fresnel term). Attached as `colorNode` by `WaterMesh`.
class WaterNode extends TempNode {
// `waterBody` is the mesh this node shades; the reflector target is added
// to it in setup() so it follows the mesh's transform.
constructor( options, waterBody ) {
super( 'vec4' );
this.waterBody = waterBody;
this.normalMap0 = texture( options.normalMap0 );
this.normalMap1 = texture( options.normalMap1 );
this.flowMap = texture( options.flowMap !== undefined ? options.flowMap : null );
this.color = uniform( options.color !== undefined ? new Color( options.color ) : new Color( 0xffffff ) );
this.flowDirection = uniform( options.flowDirection !== undefined ? options.flowDirection : new Vector2( 1, 0 ) );
this.flowSpeed = uniform( options.flowSpeed !== undefined ? options.flowSpeed : 0.03 );
this.reflectivity = uniform( options.reflectivity !== undefined ? options.reflectivity : 0.02 );
this.scale = uniform( options.scale !== undefined ? options.scale : 1 );
// Packs ( flowMapOffset0, flowMapOffset1, halfCycle ) — see updateFlow().
this.flowConfig = uniform( new Vector3() );
// updateBefore() runs once per render so the flow phase advances each frame.
this.updateBeforeType = NodeUpdateType.RENDER;
this._cycle = 0.15; // a cycle of a flow map phase
this._halfCycle = this._cycle * 0.5;
this._USE_FLOW = options.flowMap !== undefined;
}
// Advances the two flow-map phase offsets by the elapsed frame time `delta`.
updateFlow( delta ) {
this.flowConfig.value.x += this.flowSpeed.value * delta; // flowMapOffset0
this.flowConfig.value.y = this.flowConfig.value.x + this._halfCycle; // flowMapOffset1
// Important: The distance between offsets should be always the value of "halfCycle".
// Moreover, both offsets should be in the range of [ 0, cycle ].
// This approach ensures a smooth water flow and avoids "reset" effects.
if ( this.flowConfig.value.x >= this._cycle ) {
this.flowConfig.value.x = 0;
this.flowConfig.value.y = this._halfCycle;
} else if ( this.flowConfig.value.y >= this._cycle ) {
this.flowConfig.value.y = this.flowConfig.value.y - this._cycle;
}
this.flowConfig.value.z = this._halfCycle;
}
updateBefore( frame ) {
this.updateFlow( frame.deltaTime );
}
// Builds the TSL graph evaluated per fragment.
setup() {
const outputNode = Fn( () => {
const flowMapOffset0 = this.flowConfig.x;
const flowMapOffset1 = this.flowConfig.y;
const halfCycle = this.flowConfig.z;
const toEye = normalize( cameraPosition.sub( positionWorld ) );
let flow;
if ( this._USE_FLOW === true ) {
flow = this.flowMap.rg.mul( 2 ).sub( 1 );
} else {
flow = vec2( this.flowDirection.x, this.flowDirection.y );
}
flow.x.mulAssign( - 1 );
// sample normal maps (distort uvs with flowdata)
const uvs = uv();
const normalUv0 = uvs.mul( this.scale ).add( flow.mul( flowMapOffset0 ) );
const normalUv1 = uvs.mul( this.scale ).add( flow.mul( flowMapOffset1 ) );
const normalColor0 = this.normalMap0.sample( normalUv0 );
const normalColor1 = this.normalMap1.sample( normalUv1 );
// linear interpolate to get the final normal color
const flowLerp = abs( halfCycle.sub( flowMapOffset0 ) ).div( halfCycle );
const normalColor = mix( normalColor0, normalColor1, flowLerp );
// calculate normal vector
const normal = normalize( vec3( normalColor.r.mul( 2 ).sub( 1 ), normalColor.b, normalColor.g.mul( 2 ).sub( 1 ) ) );
// calculate the fresnel term to blend reflection and refraction maps
const theta = max( dot( toEye, normal ), 0 );
const reflectance = pow( float( 1.0 ).sub( theta ), 5.0 ).mul( float( 1.0 ).sub( this.reflectivity ) ).add( this.reflectivity );
// reflector, refractor
// The reflection target must be a child of the water mesh so it inherits
// the mesh's world transform; the refraction reuses the viewport texture.
const offset = normal.xz.mul( 0.05 ).toVar();
const reflectionSampler = reflector();
this.waterBody.add( reflectionSampler.target );
reflectionSampler.uvNode = reflectionSampler.uvNode.add( offset );
const refractorUV = screenUV.add( offset );
const refractionSampler = viewportSharedTexture( viewportSafeUV( refractorUV ) );
// calculate final uv coords
return vec4( this.color, 1.0 ).mul( mix( refractionSampler, reflectionSampler, reflectance ) );
} )();
return outputNode;
}
}
/**
* Constructor options of `WaterMesh`.
*
* @typedef {Object} module:Water2Mesh~Options
* @property {number|Color|string} [color=0xFFFFFF] - The water color.
* @property {Vector2} [flowDirection=(1,0)] - The water's flow direction.
* @property {number} [flowSpeed=0.03] - The water's flow speed.
* @property {number} [reflectivity=0.02] - The water's reflectivity.
* @property {number} [scale=1] - The water's scale.
* @property {?Texture} [flowMap=null] - The flow map. If no flow map is assigned, the water flow is defined by `flowDirection`.
* @property {Texture} normalMap0 - The first water normal map.
* @property {Texture} normalMap1 - The second water normal map.
**/
export { WaterMesh };

196
node_modules/three/examples/jsm/objects/WaterMesh.js generated vendored Normal file
View File

@@ -0,0 +1,196 @@
import {
Color,
Mesh,
Vector3,
MeshLambertNodeMaterial
} from 'three/webgpu';
import { Fn, add, cameraPosition, div, normalize, positionWorld, sub, time, texture, vec2, vec3, max, dot, reflect, pow, length, float, uniform, reflector, mul, mix, diffuseColor } from 'three/tsl';
/**
* A basic flat, reflective water effect.
*
* Note that this class can only be used with {@link WebGPURenderer}.
* When using {@link WebGLRenderer}, use {@link Water}.
*
* References:
*
* - [Flat mirror for three.js]{@link https://github.com/Slayvin}
* - [An implementation of water shader based on the flat mirror]{@link https://home.adelphi.edu/~stemkoski/}
* - [Water shader explanations in WebGL]{@link https://29a.ch/slides/2012/webglwater/}
*
* @augments Mesh
* @three_import import { WaterMesh } from 'three/addons/objects/WaterMesh.js';
*/
class WaterMesh extends Mesh {

	/**
	 * Constructs a new water mesh.
	 *
	 * @param {BufferGeometry} geometry - The water mesh's geometry.
	 * @param {WaterMesh~Options} [options={}] - The configuration options.
	 */
	constructor( geometry, options = {} ) { // default prevents a TypeError when options is omitted (JSDoc marks it optional)

		const material = new MeshLambertNodeMaterial();

		super( geometry, material );

		/**
		 * This flag can be used for type testing.
		 *
		 * @type {boolean}
		 * @readonly
		 * @default true
		 */
		this.isWaterMesh = true;

		/**
		 * The effect's resolution scale for the internal reflector.
		 *
		 * @type {number}
		 * @default 0.5
		 */
		this.resolution = options.resolution !== undefined ? options.resolution : 0.5;

		// Uniforms

		/**
		 * The water's normal map, sampled four times with animated offsets to produce waves.
		 *
		 * @type {TextureNode}
		 */
		this.waterNormals = texture( options.waterNormals );

		/**
		 * The alpha (opacity) value.
		 *
		 * @type {UniformNode<float>}
		 * @default 1
		 */
		this.alpha = uniform( options.alpha !== undefined ? options.alpha : 1.0 );

		/**
		 * The size value. Scales the world-space UVs of the noise lookup.
		 *
		 * @type {UniformNode<float>}
		 * @default 1
		 */
		this.size = uniform( options.size !== undefined ? options.size : 1.0 );

		/**
		 * The sun color.
		 *
		 * @type {UniformNode<color>}
		 * @default 0xffffff
		 */
		this.sunColor = uniform( new Color( options.sunColor !== undefined ? options.sunColor : 0xffffff ) );

		/**
		 * The sun direction.
		 *
		 * @type {UniformNode<vec3>}
		 * @default (0.70707,0.70707,0.0)
		 */
		this.sunDirection = uniform( options.sunDirection !== undefined ? options.sunDirection : new Vector3( 0.70707, 0.70707, 0.0 ) );

		/**
		 * The water color.
		 *
		 * @type {UniformNode<color>}
		 * @default 0x7f7f7f
		 */
		this.waterColor = uniform( new Color( options.waterColor !== undefined ? options.waterColor : 0x7f7f7f ) );

		/**
		 * The distortion scale applied to reflection UVs and the shadow position.
		 *
		 * @type {UniformNode<float>}
		 * @default 20
		 */
		this.distortionScale = uniform( options.distortionScale !== undefined ? options.distortionScale : 20.0 );

		// TSL

		// Sums four differently-scaled, time-scrolled samples of the normal map
		// and remaps the result to roughly [-1, 1] to act as a wave normal field.
		const getNoise = Fn( ( [ uv ] ) => {

			const offset = time;

			const uv0 = add( div( uv, 103 ), vec2( div( offset, 17 ), div( offset, 29 ) ) ).toVar();
			const uv1 = div( uv, 107 ).sub( vec2( div( offset, - 19 ), div( offset, 31 ) ) ).toVar();
			const uv2 = add( div( uv, vec2( 8907.0, 9803.0 ) ), vec2( div( offset, 101 ), div( offset, 97 ) ) ).toVar();
			const uv3 = sub( div( uv, vec2( 1091.0, 1027.0 ) ), vec2( div( offset, 109 ), div( offset, - 113 ) ) ).toVar();

			const sample0 = this.waterNormals.sample( uv0 );
			const sample1 = this.waterNormals.sample( uv1 );
			const sample2 = this.waterNormals.sample( uv2 );
			const sample3 = this.waterNormals.sample( uv3 );

			const noise = sample0.add( sample1 ).add( sample2 ).add( sample3 );

			return noise.mul( 0.5 ).sub( 1 );

		} );

		const noise = getNoise( positionWorld.xz.mul( this.size ) );
		// Swizzle xzy: the normal map's "up" (blue) channel becomes world-space Y.
		const surfaceNormal = normalize( noise.xzy.mul( 1.5, 1.0, 1.5 ) );

		const worldToEye = cameraPosition.sub( positionWorld );
		const eyeDirection = normalize( worldToEye );

		// Phong-style sun specular plus Lambert diffuse.
		const reflection = normalize( reflect( this.sunDirection.negate(), surfaceNormal ) );
		const direction = max( 0.0, dot( eyeDirection, reflection ) );
		const specularLight = pow( direction, 100 ).mul( this.sunColor ).mul( 2.0 );
		const diffuseLight = max( dot( this.sunDirection, surfaceNormal ), 0.0 ).mul( this.sunColor ).mul( 0.5 );

		const distance = length( worldToEye );

		// Distortion shrinks with distance so far-away water does not shimmer excessively.
		const distortion = surfaceNormal.xz.mul( float( 0.001 ).add( float( 1.0 ).div( distance ) ) ).mul( this.distortionScale );

		// Material

		material.transparent = true;
		material.opacityNode = this.alpha;
		material.receivedShadowPositionNode = positionWorld.add( distortion );
		material.setupOutgoingLight = () => diffuseColor.rgb; // backwards compatibility

		material.colorNode = Fn( () => {

			// Planar reflector whose UVs are perturbed by the wave distortion.
			const mirrorSampler = reflector();
			mirrorSampler.uvNode = mirrorSampler.uvNode.add( distortion );
			mirrorSampler.resolution = this.resolution;

			this.add( mirrorSampler.target );

			// Schlick's approximation for the fresnel term (base reflectance 0.3).
			const theta = max( dot( eyeDirection, surfaceNormal ), 0.0 );
			const rf0 = float( 0.3 );
			const reflectance = mul( pow( float( 1.0 ).sub( theta ), 5.0 ), float( 1.0 ).sub( rf0 ) ).add( rf0 );

			const scatter = max( 0.0, dot( surfaceNormal, eyeDirection ) ).mul( this.waterColor );

			// Blend scattered water color with the (specular-boosted) mirror reflection.
			const albedo = mix( this.sunColor.mul( diffuseLight ).mul( 0.3 ).add( scatter ), mirrorSampler.rgb.mul( specularLight ).add( mirrorSampler.rgb.mul( 0.9 ) ).add( vec3( 0.1 ) ), reflectance );

			return albedo;

		} )();

	}

}
/**
* Constructor options of `WaterMesh`.
*
* @typedef {Object} WaterMesh~Options
* @property {number} [resolution=0.5] - The resolution scale.
* @property {?Texture} [waterNormals=null] - The water's normal map.
* @property {number} [alpha=1] - The alpha value.
* @property {number} [size=1] - The size value.
* @property {number|Color|string} [sunColor=0xffffff] - The sun color.
* @property {Vector3} [sunDirection=(0.70707,0.70707,0.0)] - The sun direction.
* @property {number|Color|string} [waterColor=0x7F7F7F] - The water color.
* @property {number} [distortionScale=20] - The distortion scale.
**/
export { WaterMesh };