main repo

This commit is contained in:
Basilosaurusrex
2025-11-24 18:09:40 +01:00
parent b636ee5e70
commit f027651f9b
34146 changed files with 4436636 additions and 0 deletions

1835
node_modules/three/examples/jsm/loaders/3DMLoader.js generated vendored Normal file

File diff suppressed because it is too large

1621
node_modules/three/examples/jsm/loaders/3MFLoader.js generated vendored Normal file

File diff suppressed because it is too large

541
node_modules/three/examples/jsm/loaders/AMFLoader.js generated vendored Normal file

@@ -0,0 +1,541 @@
import {
BufferGeometry,
Color,
FileLoader,
Float32BufferAttribute,
Group,
Loader,
Mesh,
MeshPhongMaterial
} from 'three';
import * as fflate from '../libs/fflate.module.js';
/**
* A loader for the AMF format.
*
* The loader supports materials, color and ZIP compressed files.
* No constellation support (yet).
*
* ```js
* const loader = new AMFLoader();
*
* const object = await loader.loadAsync( './models/amf/rook.amf' );
* scene.add( object );
* ```
*
* @augments Loader
* @three_import import { AMFLoader } from 'three/addons/loaders/AMFLoader.js';
*/
class AMFLoader extends Loader {
/**
* Constructs a new AMF loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
}
/**
* Starts loading from the given URL and passes the loaded AMF asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(Group)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given AMF data and returns the resulting group.
*
* @param {ArrayBuffer} data - The raw AMF asset data as an array buffer.
* @return {Group} A group representing the parsed asset.
*/
parse( data ) {
function loadDocument( data ) {
let view = new DataView( data );
const magic = String.fromCharCode( view.getUint8( 0 ), view.getUint8( 1 ) );
if ( magic === 'PK' ) {
let zip = null;
let file = null;
console.log( 'THREE.AMFLoader: Loading Zip' );
try {
zip = fflate.unzipSync( new Uint8Array( data ) );
} catch ( e ) {
if ( e instanceof ReferenceError ) {
console.log( 'THREE.AMFLoader: fflate missing and file is compressed.' );
return null;
}
}
for ( file in zip ) {
if ( file.toLowerCase().slice( - 4 ) === '.amf' ) {
break;
}
}
console.log( 'THREE.AMFLoader: Trying to load file asset: ' + file );
view = new DataView( zip[ file ].buffer );
}
const fileText = new TextDecoder().decode( view );
const xmlData = new DOMParser().parseFromString( fileText, 'application/xml' );
if ( xmlData.documentElement.nodeName.toLowerCase() !== 'amf' ) {
console.log( 'THREE.AMFLoader: Error loading AMF - no AMF document found.' );
return null;
}
return xmlData;
}
function loadDocumentScale( node ) {
let scale = 1.0;
let unit = 'millimeter';
if ( node.documentElement.attributes.unit !== undefined ) {
unit = node.documentElement.attributes.unit.value.toLowerCase();
}
const scaleUnits = {
millimeter: 1.0,
inch: 25.4,
feet: 304.8,
meter: 1000.0,
micron: 0.001
};
if ( scaleUnits[ unit ] !== undefined ) {
scale = scaleUnits[ unit ];
}
console.log( 'THREE.AMFLoader: Unit scale: ' + scale );
return scale;
}
function loadMaterials( node ) {
let matName = 'AMF Material';
const matId = node.attributes.id.textContent;
let color = { r: 1.0, g: 1.0, b: 1.0, a: 1.0 };
let loadedMaterial = null;
for ( let i = 0; i < node.childNodes.length; i ++ ) {
const matChildEl = node.childNodes[ i ];
if ( matChildEl.nodeName === 'metadata' && matChildEl.attributes.type !== undefined ) {
if ( matChildEl.attributes.type.value === 'name' ) {
matName = matChildEl.textContent;
}
} else if ( matChildEl.nodeName === 'color' ) {
color = loadColor( matChildEl );
}
}
loadedMaterial = new MeshPhongMaterial( {
flatShading: true,
color: new Color( color.r, color.g, color.b ),
name: matName
} );
if ( color.a !== 1.0 ) {
loadedMaterial.transparent = true;
loadedMaterial.opacity = color.a;
}
return { id: matId, material: loadedMaterial };
}
function loadColor( node ) {
const color = { r: 1.0, g: 1.0, b: 1.0, a: 1.0 };
for ( let i = 0; i < node.childNodes.length; i ++ ) {
const matColor = node.childNodes[ i ];
if ( matColor.nodeName === 'r' ) {
color.r = matColor.textContent;
} else if ( matColor.nodeName === 'g' ) {
color.g = matColor.textContent;
} else if ( matColor.nodeName === 'b' ) {
color.b = matColor.textContent;
} else if ( matColor.nodeName === 'a' ) {
color.a = matColor.textContent;
}
}
return color;
}
function loadMeshVolume( node ) {
const volume = { name: '', triangles: [], materialId: null };
let currVolumeNode = node.firstElementChild;
if ( node.attributes.materialid !== undefined ) {
volume.materialId = node.attributes.materialid.nodeValue;
}
while ( currVolumeNode ) {
if ( currVolumeNode.nodeName === 'metadata' ) {
if ( currVolumeNode.attributes.type !== undefined ) {
if ( currVolumeNode.attributes.type.value === 'name' ) {
volume.name = currVolumeNode.textContent;
}
}
} else if ( currVolumeNode.nodeName === 'triangle' ) {
const v1 = currVolumeNode.getElementsByTagName( 'v1' )[ 0 ].textContent;
const v2 = currVolumeNode.getElementsByTagName( 'v2' )[ 0 ].textContent;
const v3 = currVolumeNode.getElementsByTagName( 'v3' )[ 0 ].textContent;
volume.triangles.push( v1, v2, v3 );
}
currVolumeNode = currVolumeNode.nextElementSibling;
}
return volume;
}
function loadMeshVertices( node ) {
const vertArray = [];
const normalArray = [];
let currVerticesNode = node.firstElementChild;
while ( currVerticesNode ) {
if ( currVerticesNode.nodeName === 'vertex' ) {
let vNode = currVerticesNode.firstElementChild;
while ( vNode ) {
if ( vNode.nodeName === 'coordinates' ) {
const x = vNode.getElementsByTagName( 'x' )[ 0 ].textContent;
const y = vNode.getElementsByTagName( 'y' )[ 0 ].textContent;
const z = vNode.getElementsByTagName( 'z' )[ 0 ].textContent;
vertArray.push( x, y, z );
} else if ( vNode.nodeName === 'normal' ) {
const nx = vNode.getElementsByTagName( 'nx' )[ 0 ].textContent;
const ny = vNode.getElementsByTagName( 'ny' )[ 0 ].textContent;
const nz = vNode.getElementsByTagName( 'nz' )[ 0 ].textContent;
normalArray.push( nx, ny, nz );
}
vNode = vNode.nextElementSibling;
}
}
currVerticesNode = currVerticesNode.nextElementSibling;
}
return { 'vertices': vertArray, 'normals': normalArray };
}
function loadObject( node ) {
const objId = node.attributes.id.textContent;
const loadedObject = { name: 'amfobject', meshes: [] };
let currColor = null;
let currObjNode = node.firstElementChild;
while ( currObjNode ) {
if ( currObjNode.nodeName === 'metadata' ) {
if ( currObjNode.attributes.type !== undefined ) {
if ( currObjNode.attributes.type.value === 'name' ) {
loadedObject.name = currObjNode.textContent;
}
}
} else if ( currObjNode.nodeName === 'color' ) {
currColor = loadColor( currObjNode );
} else if ( currObjNode.nodeName === 'mesh' ) {
let currMeshNode = currObjNode.firstElementChild;
const mesh = { vertices: [], normals: [], volumes: [], color: currColor };
while ( currMeshNode ) {
if ( currMeshNode.nodeName === 'vertices' ) {
const loadedVertices = loadMeshVertices( currMeshNode );
mesh.normals = mesh.normals.concat( loadedVertices.normals );
mesh.vertices = mesh.vertices.concat( loadedVertices.vertices );
} else if ( currMeshNode.nodeName === 'volume' ) {
mesh.volumes.push( loadMeshVolume( currMeshNode ) );
}
currMeshNode = currMeshNode.nextElementSibling;
}
loadedObject.meshes.push( mesh );
}
currObjNode = currObjNode.nextElementSibling;
}
return { 'id': objId, 'obj': loadedObject };
}
const xmlData = loadDocument( data );
let amfName = '';
let amfAuthor = '';
const amfScale = loadDocumentScale( xmlData );
const amfMaterials = {};
const amfObjects = {};
const childNodes = xmlData.documentElement.childNodes;
let i, j;
for ( i = 0; i < childNodes.length; i ++ ) {
const child = childNodes[ i ];
if ( child.nodeName === 'metadata' ) {
if ( child.attributes.type !== undefined ) {
if ( child.attributes.type.value === 'name' ) {
amfName = child.textContent;
} else if ( child.attributes.type.value === 'author' ) {
amfAuthor = child.textContent;
}
}
} else if ( child.nodeName === 'material' ) {
const loadedMaterial = loadMaterials( child );
amfMaterials[ loadedMaterial.id ] = loadedMaterial.material;
} else if ( child.nodeName === 'object' ) {
const loadedObject = loadObject( child );
amfObjects[ loadedObject.id ] = loadedObject.obj;
}
}
const sceneObject = new Group();
const defaultMaterial = new MeshPhongMaterial( {
name: Loader.DEFAULT_MATERIAL_NAME,
color: 0xaaaaff,
flatShading: true
} );
sceneObject.name = amfName;
sceneObject.userData.author = amfAuthor;
sceneObject.userData.loader = 'AMF';
for ( const id in amfObjects ) {
const part = amfObjects[ id ];
const meshes = part.meshes;
const newObject = new Group();
newObject.name = part.name || '';
for ( i = 0; i < meshes.length; i ++ ) {
let objDefaultMaterial = defaultMaterial;
const mesh = meshes[ i ];
const vertices = new Float32BufferAttribute( mesh.vertices, 3 );
let normals = null;
if ( mesh.normals.length ) {
normals = new Float32BufferAttribute( mesh.normals, 3 );
}
if ( mesh.color ) {
const color = mesh.color;
objDefaultMaterial = defaultMaterial.clone();
objDefaultMaterial.color = new Color( color.r, color.g, color.b );
if ( color.a !== 1.0 ) {
objDefaultMaterial.transparent = true;
objDefaultMaterial.opacity = color.a;
}
}
const volumes = mesh.volumes;
for ( j = 0; j < volumes.length; j ++ ) {
const volume = volumes[ j ];
const newGeometry = new BufferGeometry();
let material = objDefaultMaterial;
newGeometry.setIndex( volume.triangles );
newGeometry.setAttribute( 'position', vertices.clone() );
if ( normals ) {
newGeometry.setAttribute( 'normal', normals.clone() );
}
if ( amfMaterials[ volume.materialId ] !== undefined ) {
material = amfMaterials[ volume.materialId ];
}
newGeometry.scale( amfScale, amfScale, amfScale );
newObject.add( new Mesh( newGeometry, material.clone() ) );
}
}
sceneObject.add( newObject );
}
return sceneObject;
}
}
export { AMFLoader };

484
node_modules/three/examples/jsm/loaders/BVHLoader.js generated vendored Normal file

@@ -0,0 +1,484 @@
import {
AnimationClip,
Bone,
FileLoader,
Loader,
Quaternion,
QuaternionKeyframeTrack,
Skeleton,
Vector3,
VectorKeyframeTrack
} from 'three';
/**
* A loader for the BVH format.
*
* Imports BVH files and outputs a single {@link Skeleton} and {@link AnimationClip}.
* The loader only supports BVH files containing a single root right now.
*
* ```js
* const loader = new BVHLoader();
* const result = await loader.loadAsync( 'models/bvh/pirouette.bvh' );
*
* // visualize skeleton
* const skeletonHelper = new THREE.SkeletonHelper( result.skeleton.bones[ 0 ] );
* scene.add( result.skeleton.bones[ 0 ] );
* scene.add( skeletonHelper );
*
* // play animation clip
* mixer = new THREE.AnimationMixer( result.skeleton.bones[ 0 ] );
* mixer.clipAction( result.clip ).play();
* ```
*
* @augments Loader
* @three_import import { BVHLoader } from 'three/addons/loaders/BVHLoader.js';
*/
class BVHLoader extends Loader {
/**
* Constructs a new BVH loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
/**
* Whether to animate bone positions or not.
*
* @type {boolean}
* @default true
*/
this.animateBonePositions = true;
/**
* Whether to animate bone rotations or not.
*
* @type {boolean}
* @default true
*/
this.animateBoneRotations = true;
}
/**
* Starts loading from the given URL and passes the loaded BVH asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function({skeleton:Skeleton,clip:AnimationClip})} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given BVH data and returns the resulting data.
*
* @param {string} text - The raw BVH data as a string.
* @return {{skeleton:Skeleton,clip:AnimationClip}} An object representing the parsed asset.
*/
parse( text ) {
// reads a string array (lines) from a BVH file
// and outputs a skeleton structure including motion data
// returns the root node:
// { name: '', channels: [], children: [] }
function readBvh( lines ) {
// read model structure
if ( nextLine( lines ) !== 'HIERARCHY' ) {
console.error( 'THREE.BVHLoader: HIERARCHY expected.' );
}
const list = []; // collects flat array of all bones
const root = readNode( lines, nextLine( lines ), list );
// read motion data
if ( nextLine( lines ) !== 'MOTION' ) {
console.error( 'THREE.BVHLoader: MOTION expected.' );
}
// number of frames
let tokens = nextLine( lines ).split( /[\s]+/ );
const numFrames = parseInt( tokens[ 1 ] );
if ( isNaN( numFrames ) ) {
console.error( 'THREE.BVHLoader: Failed to read number of frames.' );
}
// frame time
tokens = nextLine( lines ).split( /[\s]+/ );
const frameTime = parseFloat( tokens[ 2 ] );
if ( isNaN( frameTime ) ) {
console.error( 'THREE.BVHLoader: Failed to read frame time.' );
}
// read frame data line by line
for ( let i = 0; i < numFrames; i ++ ) {
tokens = nextLine( lines ).split( /[\s]+/ );
readFrameData( tokens, i * frameTime, root );
}
return list;
}
/*
Recursively reads data from a single frame into the bone hierarchy.
The passed bone hierarchy has to be structured in the same order as the BVH file.
keyframe data is stored in bone.frames.
- data: split string array (frame values), values are shift()ed so
this should be empty after parsing the whole hierarchy.
- frameTime: playback time for this keyframe.
- bone: the bone to read frame data from.
*/
function readFrameData( data, frameTime, bone ) {
// end sites have no motion data
if ( bone.type === 'ENDSITE' ) return;
// add keyframe
const keyframe = {
time: frameTime,
position: new Vector3(),
rotation: new Quaternion()
};
bone.frames.push( keyframe );
const quat = new Quaternion();
const vx = new Vector3( 1, 0, 0 );
const vy = new Vector3( 0, 1, 0 );
const vz = new Vector3( 0, 0, 1 );
// parse values for each channel in node
for ( let i = 0; i < bone.channels.length; i ++ ) {
switch ( bone.channels[ i ] ) {
case 'Xposition':
keyframe.position.x = parseFloat( data.shift().trim() );
break;
case 'Yposition':
keyframe.position.y = parseFloat( data.shift().trim() );
break;
case 'Zposition':
keyframe.position.z = parseFloat( data.shift().trim() );
break;
case 'Xrotation':
quat.setFromAxisAngle( vx, parseFloat( data.shift().trim() ) * Math.PI / 180 );
keyframe.rotation.multiply( quat );
break;
case 'Yrotation':
quat.setFromAxisAngle( vy, parseFloat( data.shift().trim() ) * Math.PI / 180 );
keyframe.rotation.multiply( quat );
break;
case 'Zrotation':
quat.setFromAxisAngle( vz, parseFloat( data.shift().trim() ) * Math.PI / 180 );
keyframe.rotation.multiply( quat );
break;
default:
console.warn( 'THREE.BVHLoader: Invalid channel type.' );
}
}
// parse child nodes
for ( let i = 0; i < bone.children.length; i ++ ) {
readFrameData( data, frameTime, bone.children[ i ] );
}
}
/*
Recursively parses the HIERARCHY section of the BVH file
- lines: all lines of the file. lines are consumed as we go along.
- firstline: line containing the node type and name e.g. 'JOINT hip'
- list: collects a flat list of nodes
returns: a BVH node including children
*/
function readNode( lines, firstline, list ) {
const node = { name: '', type: '', frames: [] };
list.push( node );
// parse node type and name
let tokens = firstline.split( /[\s]+/ );
if ( tokens[ 0 ].toUpperCase() === 'END' && tokens[ 1 ].toUpperCase() === 'SITE' ) {
node.type = 'ENDSITE';
node.name = 'ENDSITE'; // bvh end sites have no name
} else {
node.name = tokens[ 1 ];
node.type = tokens[ 0 ].toUpperCase();
}
if ( nextLine( lines ) !== '{' ) {
console.error( 'THREE.BVHLoader: Expected opening { after type & name' );
}
// parse OFFSET
tokens = nextLine( lines ).split( /[\s]+/ );
if ( tokens[ 0 ] !== 'OFFSET' ) {
console.error( 'THREE.BVHLoader: Expected OFFSET but got: ' + tokens[ 0 ] );
}
if ( tokens.length !== 4 ) {
console.error( 'THREE.BVHLoader: Invalid number of values for OFFSET.' );
}
const offset = new Vector3(
parseFloat( tokens[ 1 ] ),
parseFloat( tokens[ 2 ] ),
parseFloat( tokens[ 3 ] )
);
if ( isNaN( offset.x ) || isNaN( offset.y ) || isNaN( offset.z ) ) {
console.error( 'THREE.BVHLoader: Invalid values of OFFSET.' );
}
node.offset = offset;
// parse CHANNELS definitions
if ( node.type !== 'ENDSITE' ) {
tokens = nextLine( lines ).split( /[\s]+/ );
if ( tokens[ 0 ] !== 'CHANNELS' ) {
console.error( 'THREE.BVHLoader: Expected CHANNELS definition.' );
}
const numChannels = parseInt( tokens[ 1 ] );
node.channels = tokens.splice( 2, numChannels );
node.children = [];
}
// read children
while ( true ) {
const line = nextLine( lines );
if ( line === '}' ) {
return node;
} else {
node.children.push( readNode( lines, line, list ) );
}
}
}
/*
recursively converts the internal bvh node structure to a Bone hierarchy
source: the bvh root node
list: pass an empty array, collects a flat list of all converted THREE.Bones
returns the root Bone
*/
function toTHREEBone( source, list ) {
const bone = new Bone();
list.push( bone );
bone.position.add( source.offset );
bone.name = source.name;
if ( source.type !== 'ENDSITE' ) {
for ( let i = 0; i < source.children.length; i ++ ) {
bone.add( toTHREEBone( source.children[ i ], list ) );
}
}
return bone;
}
/*
builds an AnimationClip from the keyframe data saved in each bone.
bones: flat array of BVH nodes as collected by readBvh
returns: an AnimationClip containing position and quaternion tracks
*/
function toTHREEAnimation( bones ) {
const tracks = [];
// create a position and quaternion animation track for each node
for ( let i = 0; i < bones.length; i ++ ) {
const bone = bones[ i ];
if ( bone.type === 'ENDSITE' )
continue;
// track data
const times = [];
const positions = [];
const rotations = [];
for ( let j = 0; j < bone.frames.length; j ++ ) {
const frame = bone.frames[ j ];
times.push( frame.time );
// the animation system animates the position property,
// so we have to add the joint offset to all values
positions.push( frame.position.x + bone.offset.x );
positions.push( frame.position.y + bone.offset.y );
positions.push( frame.position.z + bone.offset.z );
rotations.push( frame.rotation.x );
rotations.push( frame.rotation.y );
rotations.push( frame.rotation.z );
rotations.push( frame.rotation.w );
}
if ( scope.animateBonePositions ) {
tracks.push( new VectorKeyframeTrack( bone.name + '.position', times, positions ) );
}
if ( scope.animateBoneRotations ) {
tracks.push( new QuaternionKeyframeTrack( bone.name + '.quaternion', times, rotations ) );
}
}
return new AnimationClip( 'animation', - 1, tracks );
}
/*
returns the next non-empty line in lines
*/
function nextLine( lines ) {
let line;
// skip empty lines
while ( ( line = lines.shift().trim() ).length === 0 ) { }
return line;
}
const scope = this;
const lines = text.split( /[\r\n]+/g );
const bones = readBvh( lines );
const threeBones = [];
toTHREEBone( bones[ 0 ], threeBones );
const threeClip = toTHREEAnimation( bones );
return {
skeleton: new Skeleton( threeBones ),
clip: threeClip
};
}
}
export { BVHLoader };
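The class-level example above creates an `AnimationMixer` but stops short of the render loop. A minimal sketch of driving the clip each frame, assuming the `renderer`, `scene`, `camera` and the `mixer` from that example already exist:

```js
// Hypothetical render loop: advances the BVH animation clip every frame.
const clock = new THREE.Clock();

renderer.setAnimationLoop( function () {

	const delta = clock.getDelta();

	if ( mixer ) mixer.update( delta ); // step the animation

	renderer.render( scene, camera );

} );
```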

4155
node_modules/three/examples/jsm/loaders/ColladaLoader.js generated vendored Normal file

File diff suppressed because it is too large

385
node_modules/three/examples/jsm/loaders/DDSLoader.js generated vendored Normal file

@@ -0,0 +1,385 @@
import {
CompressedTextureLoader,
RGBAFormat,
RGBA_S3TC_DXT3_Format,
RGBA_S3TC_DXT5_Format,
RGB_ETC1_Format,
RGB_S3TC_DXT1_Format,
RGB_BPTC_SIGNED_Format,
RGB_BPTC_UNSIGNED_Format
} from 'three';
/**
* A loader for the S3TC texture compression format.
*
* ```js
* const loader = new DDSLoader();
*
* const map = loader.load( 'textures/compressed/disturb_dxt1_nomip.dds' );
* map.colorSpace = THREE.SRGBColorSpace; // only for color textures
* ```
*
* @augments CompressedTextureLoader
* @three_import import { DDSLoader } from 'three/addons/loaders/DDSLoader.js';
*/
class DDSLoader extends CompressedTextureLoader {
/**
* Constructs a new DDS loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
}
/**
* Parses the given S3TC texture data.
*
* @param {ArrayBuffer} buffer - The raw texture data.
* @param {boolean} loadMipmaps - Whether to load mipmaps or not.
* @return {CompressedTextureLoader~TexData} An object representing the parsed texture data.
*/
parse( buffer, loadMipmaps ) {
const dds = { mipmaps: [], width: 0, height: 0, format: null, mipmapCount: 1 };
// Adapted from @toji's DDS utils
// https://github.com/toji/webgl-texture-utils/blob/master/texture-util/dds.js
// All values and structures referenced from:
// http://msdn.microsoft.com/en-us/library/bb943991.aspx/
const DDS_MAGIC = 0x20534444;
// const DDSD_CAPS = 0x1;
// const DDSD_HEIGHT = 0x2;
// const DDSD_WIDTH = 0x4;
// const DDSD_PITCH = 0x8;
// const DDSD_PIXELFORMAT = 0x1000;
const DDSD_MIPMAPCOUNT = 0x20000;
// const DDSD_LINEARSIZE = 0x80000;
// const DDSD_DEPTH = 0x800000;
// const DDSCAPS_COMPLEX = 0x8;
// const DDSCAPS_MIPMAP = 0x400000;
// const DDSCAPS_TEXTURE = 0x1000;
const DDSCAPS2_CUBEMAP = 0x200;
const DDSCAPS2_CUBEMAP_POSITIVEX = 0x400;
const DDSCAPS2_CUBEMAP_NEGATIVEX = 0x800;
const DDSCAPS2_CUBEMAP_POSITIVEY = 0x1000;
const DDSCAPS2_CUBEMAP_NEGATIVEY = 0x2000;
const DDSCAPS2_CUBEMAP_POSITIVEZ = 0x4000;
const DDSCAPS2_CUBEMAP_NEGATIVEZ = 0x8000;
// const DDSCAPS2_VOLUME = 0x200000;
// const DDPF_ALPHAPIXELS = 0x1;
// const DDPF_ALPHA = 0x2;
// const DDPF_FOURCC = 0x4;
// const DDPF_RGB = 0x40;
// const DDPF_YUV = 0x200;
// const DDPF_LUMINANCE = 0x20000;
const DXGI_FORMAT_BC6H_UF16 = 95;
const DXGI_FORMAT_BC6H_SF16 = 96;
function fourCCToInt32( value ) {
return value.charCodeAt( 0 ) +
( value.charCodeAt( 1 ) << 8 ) +
( value.charCodeAt( 2 ) << 16 ) +
( value.charCodeAt( 3 ) << 24 );
}
function int32ToFourCC( value ) {
return String.fromCharCode(
value & 0xff,
( value >> 8 ) & 0xff,
( value >> 16 ) & 0xff,
( value >> 24 ) & 0xff
);
}
function loadARGBMip( buffer, dataOffset, width, height ) {
const dataLength = width * height * 4;
const srcBuffer = new Uint8Array( buffer, dataOffset, dataLength );
const byteArray = new Uint8Array( dataLength );
let dst = 0;
let src = 0;
for ( let y = 0; y < height; y ++ ) {
for ( let x = 0; x < width; x ++ ) {
const b = srcBuffer[ src ]; src ++;
const g = srcBuffer[ src ]; src ++;
const r = srcBuffer[ src ]; src ++;
const a = srcBuffer[ src ]; src ++;
byteArray[ dst ] = r; dst ++; //r
byteArray[ dst ] = g; dst ++; //g
byteArray[ dst ] = b; dst ++; //b
byteArray[ dst ] = a; dst ++; //a
}
}
return byteArray;
}
function loadRGBMip( buffer, dataOffset, width, height ) {
const dataLength = width * height * 3;
const srcBuffer = new Uint8Array( buffer, dataOffset, dataLength );
const byteArray = new Uint8Array( width * height * 4 );
let dst = 0;
let src = 0;
for ( let y = 0; y < height; y ++ ) {
for ( let x = 0; x < width; x ++ ) {
const b = srcBuffer[ src ]; src ++;
const g = srcBuffer[ src ]; src ++;
const r = srcBuffer[ src ]; src ++;
byteArray[ dst ] = r; dst ++; //r
byteArray[ dst ] = g; dst ++; //g
byteArray[ dst ] = b; dst ++; //b
byteArray[ dst ] = 255; dst ++; //a
}
}
return byteArray;
}
const FOURCC_DXT1 = fourCCToInt32( 'DXT1' );
const FOURCC_DXT3 = fourCCToInt32( 'DXT3' );
const FOURCC_DXT5 = fourCCToInt32( 'DXT5' );
const FOURCC_ETC1 = fourCCToInt32( 'ETC1' );
const FOURCC_DX10 = fourCCToInt32( 'DX10' );
const headerLengthInt = 31; // The header length in 32 bit ints
const extendedHeaderLengthInt = 5; // The extended header length in 32 bit ints
// Offsets into the header array
const off_magic = 0;
const off_size = 1;
const off_flags = 2;
const off_height = 3;
const off_width = 4;
const off_mipmapCount = 7;
// const off_pfFlags = 20;
const off_pfFourCC = 21;
const off_RGBBitCount = 22;
const off_RBitMask = 23;
const off_GBitMask = 24;
const off_BBitMask = 25;
const off_ABitMask = 26;
// const off_caps = 27;
const off_caps2 = 28;
// const off_caps3 = 29;
// const off_caps4 = 30;
// If fourCC = DX10, the extended header starts after 32
const off_dxgiFormat = 0;
// Parse header
const header = new Int32Array( buffer, 0, headerLengthInt );
if ( header[ off_magic ] !== DDS_MAGIC ) {
console.error( 'THREE.DDSLoader.parse: Invalid magic number in DDS header.' );
return dds;
}
let blockBytes;
const fourCC = header[ off_pfFourCC ];
let isRGBAUncompressed = false;
let isRGBUncompressed = false;
let dataOffset = header[ off_size ] + 4;
switch ( fourCC ) {
case FOURCC_DXT1:
blockBytes = 8;
dds.format = RGB_S3TC_DXT1_Format;
break;
case FOURCC_DXT3:
blockBytes = 16;
dds.format = RGBA_S3TC_DXT3_Format;
break;
case FOURCC_DXT5:
blockBytes = 16;
dds.format = RGBA_S3TC_DXT5_Format;
break;
case FOURCC_ETC1:
blockBytes = 8;
dds.format = RGB_ETC1_Format;
break;
case FOURCC_DX10:
dataOffset += extendedHeaderLengthInt * 4;
const extendedHeader = new Int32Array( buffer, ( headerLengthInt + 1 ) * 4, extendedHeaderLengthInt );
const dxgiFormat = extendedHeader[ off_dxgiFormat ];
switch ( dxgiFormat ) {
case DXGI_FORMAT_BC6H_SF16: {
blockBytes = 16;
dds.format = RGB_BPTC_SIGNED_Format;
break;
}
case DXGI_FORMAT_BC6H_UF16: {
blockBytes = 16;
dds.format = RGB_BPTC_UNSIGNED_Format;
break;
}
default: {
console.error( 'THREE.DDSLoader.parse: Unsupported DXGI_FORMAT code ', dxgiFormat );
return dds;
}
}
break;
default:
if ( header[ off_RGBBitCount ] === 32
&& header[ off_RBitMask ] & 0xff0000
&& header[ off_GBitMask ] & 0xff00
&& header[ off_BBitMask ] & 0xff
&& header[ off_ABitMask ] & 0xff000000 ) {
isRGBAUncompressed = true;
blockBytes = 64;
dds.format = RGBAFormat;
} else if ( header[ off_RGBBitCount ] === 24
&& header[ off_RBitMask ] & 0xff0000
&& header[ off_GBitMask ] & 0xff00
&& header[ off_BBitMask ] & 0xff ) {
isRGBUncompressed = true;
blockBytes = 64;
dds.format = RGBAFormat;
} else {
console.error( 'THREE.DDSLoader.parse: Unsupported FourCC code ', int32ToFourCC( fourCC ) );
return dds;
}
}
dds.mipmapCount = 1;
if ( header[ off_flags ] & DDSD_MIPMAPCOUNT && loadMipmaps !== false ) {
dds.mipmapCount = Math.max( 1, header[ off_mipmapCount ] );
}
const caps2 = header[ off_caps2 ];
dds.isCubemap = caps2 & DDSCAPS2_CUBEMAP ? true : false;
if ( dds.isCubemap && (
! ( caps2 & DDSCAPS2_CUBEMAP_POSITIVEX ) ||
! ( caps2 & DDSCAPS2_CUBEMAP_NEGATIVEX ) ||
! ( caps2 & DDSCAPS2_CUBEMAP_POSITIVEY ) ||
! ( caps2 & DDSCAPS2_CUBEMAP_NEGATIVEY ) ||
! ( caps2 & DDSCAPS2_CUBEMAP_POSITIVEZ ) ||
! ( caps2 & DDSCAPS2_CUBEMAP_NEGATIVEZ )
) ) {
console.error( 'THREE.DDSLoader.parse: Incomplete cubemap faces' );
return dds;
}
dds.width = header[ off_width ];
dds.height = header[ off_height ];
// Extract mipmaps buffers
const faces = dds.isCubemap ? 6 : 1;
for ( let face = 0; face < faces; face ++ ) {
let width = dds.width;
let height = dds.height;
for ( let i = 0; i < dds.mipmapCount; i ++ ) {
let byteArray, dataLength;
if ( isRGBAUncompressed ) {
byteArray = loadARGBMip( buffer, dataOffset, width, height );
dataLength = byteArray.length;
} else if ( isRGBUncompressed ) {
byteArray = loadRGBMip( buffer, dataOffset, width, height );
dataLength = width * height * 3;
} else {
dataLength = Math.max( 4, width ) / 4 * Math.max( 4, height ) / 4 * blockBytes;
byteArray = new Uint8Array( buffer, dataOffset, dataLength );
}
const mipmap = { 'data': byteArray, 'width': width, 'height': height };
dds.mipmaps.push( mipmap );
dataOffset += dataLength;
width = Math.max( width >> 1, 1 );
height = Math.max( height >> 1, 1 );
}
}
return dds;
}
}
export { DDSLoader };
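As the class documentation notes, `colorSpace` should only be set for color maps. A minimal, assumed usage sketch applying the loaded compressed texture to a mesh (the texture path comes from the example above; `scene` is assumed to exist):

```js
// Hypothetical usage: load a DXT1-compressed texture and display it on a plane.
const loader = new DDSLoader();
const map = loader.load( 'textures/compressed/disturb_dxt1_nomip.dds' );
map.colorSpace = THREE.SRGBColorSpace; // color texture

const material = new THREE.MeshBasicMaterial( { map: map } );
const mesh = new THREE.Mesh( new THREE.PlaneGeometry( 2, 2 ), material );
scene.add( mesh );
```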

688
node_modules/three/examples/jsm/loaders/DRACOLoader.js generated vendored Normal file

@@ -0,0 +1,688 @@
import {
BufferAttribute,
BufferGeometry,
Color,
ColorManagement,
FileLoader,
Loader,
LinearSRGBColorSpace,
SRGBColorSpace
} from 'three';
const _taskCache = new WeakMap();
/**
* A loader for the Draco format.
*
* [Draco]{@link https://google.github.io/draco/} is an open source library for compressing
* and decompressing 3D meshes and point clouds. Compressed geometry can be significantly smaller,
* at the cost of additional decoding time on the client device.
*
* Standalone Draco files have a `.drc` extension, and contain vertex positions, normals, colors,
* and other attributes. Draco files do not contain materials, textures, animation, or node hierarchies;
* to use these features, embed Draco geometry inside of a glTF file. A normal glTF file can be converted
* to a Draco-compressed glTF file using [glTF-Pipeline]{@link https://github.com/CesiumGS/gltf-pipeline}.
* When using Draco with glTF, an instance of `DRACOLoader` will be used internally by {@link GLTFLoader}.
*
* It is recommended to create one DRACOLoader instance and reuse it to avoid loading and creating
* multiple decoder instances.
*
* `DRACOLoader` will automatically use either the JS or the WASM decoding library, based on
* browser capabilities.
*
* ```js
* const loader = new DRACOLoader();
* loader.setDecoderPath( '/examples/jsm/libs/draco/' );
*
* const geometry = await loader.loadAsync( 'models/draco/bunny.drc' );
* geometry.computeVertexNormals(); // optional
*
* loader.dispose();
* ```
*
* @augments Loader
* @three_import import { DRACOLoader } from 'three/addons/loaders/DRACOLoader.js';
*/
class DRACOLoader extends Loader {
/**
* Constructs a new Draco loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
this.decoderPath = '';
this.decoderConfig = {};
this.decoderBinary = null;
this.decoderPending = null;
this.workerLimit = 4;
this.workerPool = [];
this.workerNextTaskID = 1;
this.workerSourceURL = '';
this.defaultAttributeIDs = {
position: 'POSITION',
normal: 'NORMAL',
color: 'COLOR',
uv: 'TEX_COORD'
};
this.defaultAttributeTypes = {
position: 'Float32Array',
normal: 'Float32Array',
color: 'Float32Array',
uv: 'Float32Array'
};
}
/**
* Sets the path to the folder containing the decoder libraries. The path cannot be changed after decoding begins.
*
* @param {string} path - The decoder path.
* @return {DRACOLoader} A reference to this loader.
*/
setDecoderPath( path ) {
this.decoderPath = path;
return this;
}
/**
* Provides configuration for the decoder libraries. Configuration cannot be changed after decoding begins.
*
* @param {{type:('js'|'wasm')}} config - The decoder config.
* @return {DRACOLoader} A reference to this loader.
*/
setDecoderConfig( config ) {
this.decoderConfig = config;
return this;
}
/**
* Sets the maximum number of Web Workers to be used during decoding.
* A lower limit may be preferable if workers are also used for other tasks in the application.
*
* @param {number} workerLimit - The worker limit.
* @return {DRACOLoader} A reference to this loader.
*/
setWorkerLimit( workerLimit ) {
this.workerLimit = workerLimit;
return this;
}
/**
* Starts loading from the given URL and passes the loaded Draco asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(BufferGeometry)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( this.requestHeader );
loader.setWithCredentials( this.withCredentials );
loader.load( url, ( buffer ) => {
this.parse( buffer, onLoad, onError );
}, onProgress, onError );
}
/**
* Parses the given Draco data.
*
* @param {ArrayBuffer} buffer - The raw Draco data as an array buffer.
* @param {function(BufferGeometry)} onLoad - Executed when the loading/parsing process has been finished.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
parse( buffer, onLoad, onError = ()=>{} ) {
this.decodeDracoFile( buffer, onLoad, null, null, SRGBColorSpace, onError ).catch( onError );
}
//
decodeDracoFile( buffer, callback, attributeIDs, attributeTypes, vertexColorSpace = LinearSRGBColorSpace, onError = () => {} ) {
const taskConfig = {
attributeIDs: attributeIDs || this.defaultAttributeIDs,
attributeTypes: attributeTypes || this.defaultAttributeTypes,
useUniqueIDs: !! attributeIDs,
vertexColorSpace: vertexColorSpace,
};
return this.decodeGeometry( buffer, taskConfig ).then( callback ).catch( onError );
}
decodeGeometry( buffer, taskConfig ) {
const taskKey = JSON.stringify( taskConfig );
// Check for an existing task using this buffer. A transferred buffer cannot be transferred
// again from this thread.
if ( _taskCache.has( buffer ) ) {
const cachedTask = _taskCache.get( buffer );
if ( cachedTask.key === taskKey ) {
return cachedTask.promise;
} else if ( buffer.byteLength === 0 ) {
// Technically, it would be possible to wait for the previous task to complete,
// transfer the buffer back, and decode again with the second configuration. That
// is complex, and I don't know of any reason to decode a Draco buffer twice in
// different ways, so this is left unimplemented.
throw new Error(
'THREE.DRACOLoader: Unable to re-decode a buffer with different ' +
'settings. Buffer has already been transferred.'
);
}
}
//
let worker;
const taskID = this.workerNextTaskID ++;
const taskCost = buffer.byteLength;
// Obtain a worker and assign a task, and construct a geometry instance
// when the task completes.
const geometryPending = this._getWorker( taskID, taskCost )
.then( ( _worker ) => {
worker = _worker;
return new Promise( ( resolve, reject ) => {
worker._callbacks[ taskID ] = { resolve, reject };
worker.postMessage( { type: 'decode', id: taskID, taskConfig, buffer }, [ buffer ] );
// this.debug();
} );
} )
.then( ( message ) => this._createGeometry( message.geometry ) );
// Remove task from the task list.
// Note: replaced '.finally()' with '.catch().then()' block - iOS 11 support (#19416)
geometryPending
.catch( () => true )
.then( () => {
if ( worker && taskID ) {
this._releaseTask( worker, taskID );
// this.debug();
}
} );
// Cache the task result.
_taskCache.set( buffer, {
key: taskKey,
promise: geometryPending
} );
return geometryPending;
}
_createGeometry( geometryData ) {
const geometry = new BufferGeometry();
if ( geometryData.index ) {
geometry.setIndex( new BufferAttribute( geometryData.index.array, 1 ) );
}
for ( let i = 0; i < geometryData.attributes.length; i ++ ) {
const result = geometryData.attributes[ i ];
const name = result.name;
const array = result.array;
const itemSize = result.itemSize;
const attribute = new BufferAttribute( array, itemSize );
if ( name === 'color' ) {
this._assignVertexColorSpace( attribute, result.vertexColorSpace );
attribute.normalized = ( array instanceof Float32Array ) === false;
}
geometry.setAttribute( name, attribute );
}
return geometry;
}
_assignVertexColorSpace( attribute, inputColorSpace ) {
// While .drc files do not specify colorspace, the only 'official' tooling
// is PLY and OBJ converters, which use sRGB. We'll assume sRGB when a .drc
// file is passed into .load() or .parse(). GLTFLoader uses internal APIs
// to decode geometry, and vertex colors are already Linear-sRGB in there.
if ( inputColorSpace !== SRGBColorSpace ) return;
const _color = new Color();
for ( let i = 0, il = attribute.count; i < il; i ++ ) {
_color.fromBufferAttribute( attribute, i );
ColorManagement.colorSpaceToWorking( _color, SRGBColorSpace );
attribute.setXYZ( i, _color.r, _color.g, _color.b );
}
}
_loadLibrary( url, responseType ) {
const loader = new FileLoader( this.manager );
loader.setPath( this.decoderPath );
loader.setResponseType( responseType );
loader.setWithCredentials( this.withCredentials );
return new Promise( ( resolve, reject ) => {
loader.load( url, resolve, undefined, reject );
} );
}
preload() {
this._initDecoder();
return this;
}
_initDecoder() {
if ( this.decoderPending ) return this.decoderPending;
const useJS = typeof WebAssembly !== 'object' || this.decoderConfig.type === 'js';
const librariesPending = [];
if ( useJS ) {
librariesPending.push( this._loadLibrary( 'draco_decoder.js', 'text' ) );
} else {
librariesPending.push( this._loadLibrary( 'draco_wasm_wrapper.js', 'text' ) );
librariesPending.push( this._loadLibrary( 'draco_decoder.wasm', 'arraybuffer' ) );
}
this.decoderPending = Promise.all( librariesPending )
.then( ( libraries ) => {
const jsContent = libraries[ 0 ];
if ( ! useJS ) {
this.decoderConfig.wasmBinary = libraries[ 1 ];
}
const fn = DRACOWorker.toString();
const body = [
'/* draco decoder */',
jsContent,
'',
'/* worker */',
fn.substring( fn.indexOf( '{' ) + 1, fn.lastIndexOf( '}' ) )
].join( '\n' );
this.workerSourceURL = URL.createObjectURL( new Blob( [ body ] ) );
} );
return this.decoderPending;
}
_getWorker( taskID, taskCost ) {
return this._initDecoder().then( () => {
if ( this.workerPool.length < this.workerLimit ) {
const worker = new Worker( this.workerSourceURL );
worker._callbacks = {};
worker._taskCosts = {};
worker._taskLoad = 0;
worker.postMessage( { type: 'init', decoderConfig: this.decoderConfig } );
worker.onmessage = function ( e ) {
const message = e.data;
switch ( message.type ) {
case 'decode':
worker._callbacks[ message.id ].resolve( message );
break;
case 'error':
worker._callbacks[ message.id ].reject( message );
break;
default:
console.error( 'THREE.DRACOLoader: Unexpected message, "' + message.type + '"' );
}
};
this.workerPool.push( worker );
} else {
this.workerPool.sort( function ( a, b ) {
return a._taskLoad > b._taskLoad ? - 1 : 1;
} );
}
const worker = this.workerPool[ this.workerPool.length - 1 ];
worker._taskCosts[ taskID ] = taskCost;
worker._taskLoad += taskCost;
return worker;
} );
}
_releaseTask( worker, taskID ) {
worker._taskLoad -= worker._taskCosts[ taskID ];
delete worker._callbacks[ taskID ];
delete worker._taskCosts[ taskID ];
}
debug() {
console.log( 'Task load: ', this.workerPool.map( ( worker ) => worker._taskLoad ) );
}
dispose() {
for ( let i = 0; i < this.workerPool.length; ++ i ) {
this.workerPool[ i ].terminate();
}
this.workerPool.length = 0;
if ( this.workerSourceURL !== '' ) {
URL.revokeObjectURL( this.workerSourceURL );
}
return this;
}
}
/* WEB WORKER */
function DRACOWorker() {
let decoderConfig;
let decoderPending;
onmessage = function ( e ) {
const message = e.data;
switch ( message.type ) {
case 'init':
decoderConfig = message.decoderConfig;
decoderPending = new Promise( function ( resolve/*, reject*/ ) {
decoderConfig.onModuleLoaded = function ( draco ) {
// Module is Promise-like. Wrap before resolving to avoid loop.
resolve( { draco: draco } );
};
DracoDecoderModule( decoderConfig ); // eslint-disable-line no-undef
} );
break;
case 'decode':
const buffer = message.buffer;
const taskConfig = message.taskConfig;
decoderPending.then( ( module ) => {
const draco = module.draco;
const decoder = new draco.Decoder();
try {
const geometry = decodeGeometry( draco, decoder, new Int8Array( buffer ), taskConfig );
const buffers = geometry.attributes.map( ( attr ) => attr.array.buffer );
if ( geometry.index ) buffers.push( geometry.index.array.buffer );
self.postMessage( { type: 'decode', id: message.id, geometry }, buffers );
} catch ( error ) {
console.error( error );
self.postMessage( { type: 'error', id: message.id, error: error.message } );
} finally {
draco.destroy( decoder );
}
} );
break;
}
};
function decodeGeometry( draco, decoder, array, taskConfig ) {
const attributeIDs = taskConfig.attributeIDs;
const attributeTypes = taskConfig.attributeTypes;
let dracoGeometry;
let decodingStatus;
const geometryType = decoder.GetEncodedGeometryType( array );
if ( geometryType === draco.TRIANGULAR_MESH ) {
dracoGeometry = new draco.Mesh();
decodingStatus = decoder.DecodeArrayToMesh( array, array.byteLength, dracoGeometry );
} else if ( geometryType === draco.POINT_CLOUD ) {
dracoGeometry = new draco.PointCloud();
decodingStatus = decoder.DecodeArrayToPointCloud( array, array.byteLength, dracoGeometry );
} else {
throw new Error( 'THREE.DRACOLoader: Unexpected geometry type.' );
}
if ( ! decodingStatus.ok() || dracoGeometry.ptr === 0 ) {
throw new Error( 'THREE.DRACOLoader: Decoding failed: ' + decodingStatus.error_msg() );
}
const geometry = { index: null, attributes: [] };
// Gather all vertex attributes.
for ( const attributeName in attributeIDs ) {
const attributeType = self[ attributeTypes[ attributeName ] ];
let attribute;
let attributeID;
// A Draco file may be created with default vertex attributes, whose attribute IDs
// are mapped 1:1 from their semantic name (POSITION, NORMAL, ...). Alternatively,
// a Draco file may contain a custom set of attributes, identified by known unique
// IDs. glTF files always do the latter, and `.drc` files typically do the former.
if ( taskConfig.useUniqueIDs ) {
attributeID = attributeIDs[ attributeName ];
attribute = decoder.GetAttributeByUniqueId( dracoGeometry, attributeID );
} else {
attributeID = decoder.GetAttributeId( dracoGeometry, draco[ attributeIDs[ attributeName ] ] );
if ( attributeID === - 1 ) continue;
attribute = decoder.GetAttribute( dracoGeometry, attributeID );
}
const attributeResult = decodeAttribute( draco, decoder, dracoGeometry, attributeName, attributeType, attribute );
if ( attributeName === 'color' ) {
attributeResult.vertexColorSpace = taskConfig.vertexColorSpace;
}
geometry.attributes.push( attributeResult );
}
// Add index.
if ( geometryType === draco.TRIANGULAR_MESH ) {
geometry.index = decodeIndex( draco, decoder, dracoGeometry );
}
draco.destroy( dracoGeometry );
return geometry;
}
function decodeIndex( draco, decoder, dracoGeometry ) {
const numFaces = dracoGeometry.num_faces();
const numIndices = numFaces * 3;
const byteLength = numIndices * 4;
const ptr = draco._malloc( byteLength );
decoder.GetTrianglesUInt32Array( dracoGeometry, byteLength, ptr );
const index = new Uint32Array( draco.HEAPF32.buffer, ptr, numIndices ).slice();
draco._free( ptr );
return { array: index, itemSize: 1 };
}
function decodeAttribute( draco, decoder, dracoGeometry, attributeName, attributeType, attribute ) {
const numComponents = attribute.num_components();
const numPoints = dracoGeometry.num_points();
const numValues = numPoints * numComponents;
const byteLength = numValues * attributeType.BYTES_PER_ELEMENT;
const dataType = getDracoDataType( draco, attributeType );
const ptr = draco._malloc( byteLength );
decoder.GetAttributeDataArrayForAllPoints( dracoGeometry, attribute, dataType, byteLength, ptr );
const array = new attributeType( draco.HEAPF32.buffer, ptr, numValues ).slice();
draco._free( ptr );
return {
name: attributeName,
array: array,
itemSize: numComponents
};
}
function getDracoDataType( draco, attributeType ) {
switch ( attributeType ) {
case Float32Array: return draco.DT_FLOAT32;
case Int8Array: return draco.DT_INT8;
case Int16Array: return draco.DT_INT16;
case Int32Array: return draco.DT_INT32;
case Uint8Array: return draco.DT_UINT8;
case Uint16Array: return draco.DT_UINT16;
case Uint32Array: return draco.DT_UINT32;
}
}
}
export { DRACOLoader };
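The class documentation mentions that `GLTFLoader` uses a `DRACOLoader` instance internally for Draco-compressed glTF assets. A minimal sketch of wiring the two together (the model path is illustrative and `scene` is assumed to exist):

```js
// Hypothetical setup: reuse one DRACOLoader instance for Draco-compressed glTF files.
import { GLTFLoader } from 'three/addons/loaders/GLTFLoader.js';

const dracoLoader = new DRACOLoader();
dracoLoader.setDecoderPath( '/examples/jsm/libs/draco/' );

const gltfLoader = new GLTFLoader();
gltfLoader.setDRACOLoader( dracoLoader );

const gltf = await gltfLoader.loadAsync( 'models/gltf/compressed.glb' );
scene.add( gltf.scene );
```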

2784
node_modules/three/examples/jsm/loaders/EXRLoader.js generated vendored Normal file

File diff suppressed because it is too large

4382
node_modules/three/examples/jsm/loaders/FBXLoader.js generated vendored Normal file

File diff suppressed because it is too large

242
node_modules/three/examples/jsm/loaders/FontLoader.js generated vendored Normal file

@@ -0,0 +1,242 @@
import {
FileLoader,
Loader,
ShapePath
} from 'three';
/**
* A loader for loading fonts.
*
* You can convert fonts online using [facetype.js]{@link https://gero3.github.io/facetype.js/}.
*
* ```js
* const loader = new FontLoader();
* const font = await loader.loadAsync( 'fonts/helvetiker_regular.typeface.json' );
* ```
*
* @augments Loader
* @three_import import { FontLoader } from 'three/addons/loaders/FontLoader.js';
*/
class FontLoader extends Loader {
/**
* Constructs a new font loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
}
/**
* Starts loading from the given URL and passes the loaded font
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(Font)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setRequestHeader( this.requestHeader );
loader.setWithCredentials( this.withCredentials );
loader.load( url, function ( text ) {
const font = scope.parse( JSON.parse( text ) );
if ( onLoad ) onLoad( font );
}, onProgress, onError );
}
/**
* Parses the given font data and returns the resulting font.
*
* @param {Object} json - The raw font data as a JSON object.
* @return {Font} The font.
*/
parse( json ) {
return new Font( json );
}
}
/**
* Class representing a font.
*/
class Font {
/**
* Constructs a new font.
*
* @param {Object} data - The font data as JSON.
*/
constructor( data ) {
/**
* This flag can be used for type testing.
*
* @type {boolean}
* @readonly
* @default true
*/
this.isFont = true;
this.type = 'Font';
/**
* The font data as JSON.
*
* @type {Object}
*/
this.data = data;
}
/**
* Generates geometry shapes from the given text and size. The result of this method
* should be used with {@link ShapeGeometry} to generate the actual geometry data.
*
* @param {string} text - The text.
* @param {number} [size=100] - The text size.
* @return {Array<Shape>} An array of shapes representing the text.
*/
generateShapes( text, size = 100 ) {
const shapes = [];
const paths = createPaths( text, size, this.data );
for ( let p = 0, pl = paths.length; p < pl; p ++ ) {
shapes.push( ...paths[ p ].toShapes() );
}
return shapes;
}
}
function createPaths( text, size, data ) {
const chars = Array.from( text );
const scale = size / data.resolution;
const line_height = ( data.boundingBox.yMax - data.boundingBox.yMin + data.underlineThickness ) * scale;
const paths = [];
let offsetX = 0, offsetY = 0;
for ( let i = 0; i < chars.length; i ++ ) {
const char = chars[ i ];
if ( char === '\n' ) {
offsetX = 0;
offsetY -= line_height;
} else {
const ret = createPath( char, scale, offsetX, offsetY, data );
offsetX += ret.offsetX;
paths.push( ret.path );
}
}
return paths;
}
function createPath( char, scale, offsetX, offsetY, data ) {
const glyph = data.glyphs[ char ] || data.glyphs[ '?' ];
if ( ! glyph ) {
console.error( 'THREE.Font: character "' + char + '" does not exist in font family ' + data.familyName + '.' );
return;
}
const path = new ShapePath();
let x, y, cpx, cpy, cpx1, cpy1, cpx2, cpy2;
if ( glyph.o ) {
const outline = glyph._cachedOutline || ( glyph._cachedOutline = glyph.o.split( ' ' ) );
for ( let i = 0, l = outline.length; i < l; ) {
const action = outline[ i ++ ];
switch ( action ) {
case 'm': // moveTo
x = outline[ i ++ ] * scale + offsetX;
y = outline[ i ++ ] * scale + offsetY;
path.moveTo( x, y );
break;
case 'l': // lineTo
x = outline[ i ++ ] * scale + offsetX;
y = outline[ i ++ ] * scale + offsetY;
path.lineTo( x, y );
break;
case 'q': // quadraticCurveTo
cpx = outline[ i ++ ] * scale + offsetX;
cpy = outline[ i ++ ] * scale + offsetY;
cpx1 = outline[ i ++ ] * scale + offsetX;
cpy1 = outline[ i ++ ] * scale + offsetY;
path.quadraticCurveTo( cpx1, cpy1, cpx, cpy );
break;
case 'b': // bezierCurveTo
cpx = outline[ i ++ ] * scale + offsetX;
cpy = outline[ i ++ ] * scale + offsetY;
cpx1 = outline[ i ++ ] * scale + offsetX;
cpy1 = outline[ i ++ ] * scale + offsetY;
cpx2 = outline[ i ++ ] * scale + offsetX;
cpy2 = outline[ i ++ ] * scale + offsetY;
path.bezierCurveTo( cpx1, cpy1, cpx2, cpy2, cpx, cpy );
break;
}
}
}
return { offsetX: glyph.ha * scale, path: path };
}
export { FontLoader, Font };
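`Font.generateShapes()` above is documented as producing input for `ShapeGeometry`. A minimal sketch of turning a string into a text mesh, assuming a `scene` already exists (font path taken from the class example, material settings illustrative):

```js
// Hypothetical usage: build flat text geometry from the generated shapes.
const font = await new FontLoader().loadAsync( 'fonts/helvetiker_regular.typeface.json' );
const shapes = font.generateShapes( 'Hello three.js!', 80 );

const geometry = new THREE.ShapeGeometry( shapes );
geometry.computeBoundingBox(); // optional: useful for centering the text

const textMesh = new THREE.Mesh( geometry, new THREE.MeshBasicMaterial( { color: 0x006699 } ) );
scene.add( textMesh );
```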

292
node_modules/three/examples/jsm/loaders/GCodeLoader.js generated vendored Normal file

@@ -0,0 +1,292 @@
import {
BufferGeometry,
FileLoader,
Float32BufferAttribute,
Group,
LineBasicMaterial,
LineSegments,
Loader
} from 'three';
/**
* A loader for the GCode format.
*
* GCode files are usually used for 3D printing or CNC applications.
*
* ```js
* const loader = new GCodeLoader();
* const object = await loader.loadAsync( 'models/gcode/benchy.gcode' );
* scene.add( object );
* ```
*
* @augments Loader
* @three_import import { GCodeLoader } from 'three/addons/loaders/GCodeLoader.js';
*/
class GCodeLoader extends Loader {
/**
* Constructs a new GCode loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
/**
* Whether to split layers or not.
*
* @type {boolean}
* @default false
*/
this.splitLayer = false;
}
/**
* Starts loading from the given URL and passes the loaded GCode asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(Group)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given GCode data and returns a group with lines.
*
* @param {string} data - The raw Gcode data as a string.
* @return {Group} The parsed GCode asset.
*/
parse( data ) {
let state = { x: 0, y: 0, z: 0, e: 0, f: 0, extruding: false, relative: false };
const layers = [];
let currentLayer = undefined;
const pathMaterial = new LineBasicMaterial( { color: 0xFF0000 } );
pathMaterial.name = 'path';
const extrudingMaterial = new LineBasicMaterial( { color: 0x00FF00 } );
extrudingMaterial.name = 'extruded';
function newLayer( line ) {
currentLayer = { vertex: [], pathVertex: [], z: line.z };
layers.push( currentLayer );
}
//Create line segment between p1 and p2
function addSegment( p1, p2 ) {
if ( currentLayer === undefined ) {
newLayer( p1 );
}
if ( state.extruding ) {
currentLayer.vertex.push( p1.x, p1.y, p1.z );
currentLayer.vertex.push( p2.x, p2.y, p2.z );
} else {
currentLayer.pathVertex.push( p1.x, p1.y, p1.z );
currentLayer.pathVertex.push( p2.x, p2.y, p2.z );
}
}
function delta( v1, v2 ) {
return state.relative ? v2 : v2 - v1;
}
function absolute( v1, v2 ) {
return state.relative ? v1 + v2 : v2;
}
const lines = data.replace( /;.+/g, '' ).split( '\n' );
for ( let i = 0; i < lines.length; i ++ ) {
const tokens = lines[ i ].split( ' ' );
const cmd = tokens[ 0 ].toUpperCase();
//Arguments
const args = {};
tokens.splice( 1 ).forEach( function ( token ) {
if ( token[ 0 ] !== undefined ) {
const key = token[ 0 ].toLowerCase();
const value = parseFloat( token.substring( 1 ) );
args[ key ] = value;
}
} );
//Process commands
//G0/G1 Linear Movement
if ( cmd === 'G0' || cmd === 'G1' ) {
const line = {
x: args.x !== undefined ? absolute( state.x, args.x ) : state.x,
y: args.y !== undefined ? absolute( state.y, args.y ) : state.y,
z: args.z !== undefined ? absolute( state.z, args.z ) : state.z,
e: args.e !== undefined ? absolute( state.e, args.e ) : state.e,
f: args.f !== undefined ? absolute( state.f, args.f ) : state.f,
};
//Layer change detection is not made by watching Z, it's made by watching when we extrude at a new Z position
if ( delta( state.e, line.e ) > 0 ) {
state.extruding = delta( state.e, line.e ) > 0;
if ( currentLayer == undefined || line.z != currentLayer.z ) {
newLayer( line );
}
}
addSegment( state, line );
state = line;
} else if ( cmd === 'G2' || cmd === 'G3' ) {
//G2/G3 - Arc Movement ( G2 clockwise and G3 counter-clockwise )
//console.warn( 'THREE.GCodeLoader: Arc command not supported' );
} else if ( cmd === 'G90' ) {
//G90: Set to Absolute Positioning
state.relative = false;
} else if ( cmd === 'G91' ) {
//G91: Set to state.relative Positioning
state.relative = true;
} else if ( cmd === 'G92' ) {
//G92: Set Position
const line = state;
line.x = args.x !== undefined ? args.x : line.x;
line.y = args.y !== undefined ? args.y : line.y;
line.z = args.z !== undefined ? args.z : line.z;
line.e = args.e !== undefined ? args.e : line.e;
} else {
//console.warn( 'THREE.GCodeLoader: Command not supported:' + cmd );
}
}
function addObject( vertex, extruding, i ) {
const geometry = new BufferGeometry();
geometry.setAttribute( 'position', new Float32BufferAttribute( vertex, 3 ) );
const segments = new LineSegments( geometry, extruding ? extrudingMaterial : pathMaterial );
segments.name = 'layer' + i;
object.add( segments );
}
const object = new Group();
object.name = 'gcode';
if ( this.splitLayer ) {
for ( let i = 0; i < layers.length; i ++ ) {
const layer = layers[ i ];
addObject( layer.vertex, true, i );
addObject( layer.pathVertex, false, i );
}
} else {
const vertex = [],
pathVertex = [];
for ( let i = 0; i < layers.length; i ++ ) {
const layer = layers[ i ];
const layerVertex = layer.vertex;
const layerPathVertex = layer.pathVertex;
for ( let j = 0; j < layerVertex.length; j ++ ) {
vertex.push( layerVertex[ j ] );
}
for ( let j = 0; j < layerPathVertex.length; j ++ ) {
pathVertex.push( layerPathVertex[ j ] );
}
}
addObject( vertex, true, layers.length );
addObject( pathVertex, false, layers.length );
}
object.rotation.set( - Math.PI / 2, 0, 0 );
return object;
}
}
export { GCodeLoader };
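The `splitLayer` flag documented in the constructor defaults to `false`, merging all layers into two line objects. A minimal sketch of enabling it so that each print layer keeps its own named `LineSegments` pair (file path taken from the class example, `scene` assumed):

```js
// Hypothetical usage: keep one LineSegments pair per layer instead of merging all layers.
const loader = new GCodeLoader();
loader.splitLayer = true;

const object = await loader.loadAsync( 'models/gcode/benchy.gcode' );
scene.add( object ); // children are named 'layer0', 'layer1', ...
```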

4890
node_modules/three/examples/jsm/loaders/GLTFLoader.js generated vendored Normal file

File diff suppressed because it is too large

164
node_modules/three/examples/jsm/loaders/HDRCubeTextureLoader.js generated vendored Normal file

@@ -0,0 +1,164 @@
import {
CubeTexture,
DataTexture,
FileLoader,
FloatType,
HalfFloatType,
LinearFilter,
LinearSRGBColorSpace,
Loader
} from 'three';
import { HDRLoader } from '../loaders/HDRLoader.js';
/**
* A loader for loading HDR cube textures.
*
* ```js
* const loader = new HDRCubeTextureLoader();
* loader.setPath( 'textures/cube/pisaHDR/' );
* const cubeTexture = await loader.loadAsync( [ 'px.hdr', 'nx.hdr', 'py.hdr', 'ny.hdr', 'pz.hdr', 'nz.hdr' ] );
*
* scene.background = cubeTexture;
* scene.environment = cubeTexture;
* ```
*
* @augments Loader
* @three_import import { HDRCubeTextureLoader } from 'three/addons/loaders/HDRCubeTextureLoader.js';
*/
class HDRCubeTextureLoader extends Loader {
/**
* Constructs a new HDR cube texture loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
/**
* The internal HDR loader that loads the
* individual textures for each cube face.
*
* @type {HDRLoader}
*/
this.hdrLoader = new HDRLoader();
/**
* The texture type.
*
* @type {(HalfFloatType|FloatType)}
* @default HalfFloatType
*/
this.type = HalfFloatType;
}
/**
* Starts loading from the given URLs and passes the loaded HDR cube texture
* to the `onLoad()` callback.
*
* @param {Array<string>} urls - The paths/URLs of the files to be loaded. These can also be data URIs.
* @param {function(CubeTexture)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
* @return {CubeTexture} The HDR cube texture.
*/
load( urls, onLoad, onProgress, onError ) {
const texture = new CubeTexture();
texture.type = this.type;
switch ( texture.type ) {
case FloatType:
texture.colorSpace = LinearSRGBColorSpace;
texture.minFilter = LinearFilter;
texture.magFilter = LinearFilter;
texture.generateMipmaps = false;
break;
case HalfFloatType:
texture.colorSpace = LinearSRGBColorSpace;
texture.minFilter = LinearFilter;
texture.magFilter = LinearFilter;
texture.generateMipmaps = false;
break;
}
const scope = this;
let loaded = 0;
function loadHDRData( i, onLoad, onProgress, onError ) {
new FileLoader( scope.manager )
.setPath( scope.path )
.setResponseType( 'arraybuffer' )
.setWithCredentials( scope.withCredentials )
.load( urls[ i ], function ( buffer ) {
loaded ++;
const texData = scope.hdrLoader.parse( buffer );
if ( ! texData ) return;
if ( texData.data !== undefined ) {
const dataTexture = new DataTexture( texData.data, texData.width, texData.height );
dataTexture.type = texture.type;
dataTexture.colorSpace = texture.colorSpace;
dataTexture.format = texture.format;
dataTexture.minFilter = texture.minFilter;
dataTexture.magFilter = texture.magFilter;
dataTexture.generateMipmaps = texture.generateMipmaps;
texture.images[ i ] = dataTexture;
}
if ( loaded === 6 ) {
texture.needsUpdate = true;
if ( onLoad ) onLoad( texture );
}
}, onProgress, onError );
}
for ( let i = 0; i < urls.length; i ++ ) {
loadHDRData( i, onLoad, onProgress, onError );
}
return texture;
}
/**
* Sets the texture type.
*
* @param {(HalfFloatType|FloatType)} value - The texture type to set.
* @return {HDRCubeTextureLoader} A reference to this loader.
*/
setDataType( value ) {
this.type = value;
this.hdrLoader.setDataType( value );
return this;
}
}
export { HDRCubeTextureLoader };
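/*
 * Usage sketch (not part of the upstream file): requests FloatType instead of the
 * default HalfFloatType. The path and file names are assumptions for illustration.
 */
const demoHDRCubeLoader = new HDRCubeTextureLoader()
	.setPath( 'textures/cube/pisaHDR/' )
	.setDataType( FloatType );

demoHDRCubeLoader.load( [ 'px.hdr', 'nx.hdr', 'py.hdr', 'ny.hdr', 'pz.hdr', 'nz.hdr' ], function ( cubeTexture ) {

	// fires once all six faces are decoded into DataTextures (see loadHDRData above)
	console.log( 'HDR cube texture ready:', cubeTexture.images.length, 'faces' );

} );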

486
node_modules/three/examples/jsm/loaders/HDRLoader.js generated vendored Normal file
View File

@@ -0,0 +1,486 @@
import {
DataTextureLoader,
DataUtils,
FloatType,
HalfFloatType,
LinearFilter,
LinearSRGBColorSpace
} from 'three';
/**
* A loader for the RGBE HDR texture format.
*
* ```js
* const loader = new HDRLoader();
* const envMap = await loader.loadAsync( 'textures/equirectangular/blouberg_sunrise_2_1k.hdr' );
* envMap.mapping = THREE.EquirectangularReflectionMapping;
*
* scene.environment = envMap;
* ```
*
* @augments DataTextureLoader
* @three_import import { HDRLoader } from 'three/addons/loaders/HDRLoader.js';
*/
class HDRLoader extends DataTextureLoader {
/**
* Constructs a new RGBE/HDR loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
/**
* The texture type.
*
* @type {(HalfFloatType|FloatType)}
* @default HalfFloatType
*/
this.type = HalfFloatType;
}
/**
* Parses the given RGBE texture data.
*
* @param {ArrayBuffer} buffer - The raw texture data.
* @return {DataTextureLoader~TexData} An object representing the parsed texture data.
*/
parse( buffer ) {
// adapted from http://www.graphics.cornell.edu/~bjw/rgbe.html
const
/* default error routine. change this to change error handling */
rgbe_read_error = 1,
rgbe_write_error = 2,
rgbe_format_error = 3,
rgbe_memory_error = 4,
rgbe_error = function ( rgbe_error_code, msg ) {
switch ( rgbe_error_code ) {
case rgbe_read_error: throw new Error( 'THREE.HDRLoader: Read Error: ' + ( msg || '' ) );
case rgbe_write_error: throw new Error( 'THREE.HDRLoader: Write Error: ' + ( msg || '' ) );
case rgbe_format_error: throw new Error( 'THREE.HDRLoader: Bad File Format: ' + ( msg || '' ) );
default:
case rgbe_memory_error: throw new Error( 'THREE.HDRLoader: Memory Error: ' + ( msg || '' ) );
}
},
/* offsets to red, green, and blue components in a data (float) pixel */
//RGBE_DATA_RED = 0,
//RGBE_DATA_GREEN = 1,
//RGBE_DATA_BLUE = 2,
/* number of floats per pixel, use 4 since stored in rgba image format */
//RGBE_DATA_SIZE = 4,
/* flags indicating which fields in an rgbe_header_info are valid */
RGBE_VALID_PROGRAMTYPE = 1,
RGBE_VALID_FORMAT = 2,
RGBE_VALID_DIMENSIONS = 4,
NEWLINE = '\n',
fgets = function ( buffer, lineLimit, consume ) {
const chunkSize = 128;
lineLimit = ! lineLimit ? 1024 : lineLimit;
let p = buffer.pos,
i = - 1, len = 0, s = '',
chunk = String.fromCharCode.apply( null, new Uint16Array( buffer.subarray( p, p + chunkSize ) ) );
while ( ( 0 > ( i = chunk.indexOf( NEWLINE ) ) ) && ( len < lineLimit ) && ( p < buffer.byteLength ) ) {
s += chunk; len += chunk.length;
p += chunkSize;
chunk += String.fromCharCode.apply( null, new Uint16Array( buffer.subarray( p, p + chunkSize ) ) );
}
if ( - 1 < i ) {
/*for (i=l-1; i>=0; i--) {
byteCode = m.charCodeAt(i);
if (byteCode > 0x7f && byteCode <= 0x7ff) byteLen++;
else if (byteCode > 0x7ff && byteCode <= 0xffff) byteLen += 2;
if (byteCode >= 0xDC00 && byteCode <= 0xDFFF) i--; //trail surrogate
}*/
if ( false !== consume ) buffer.pos += len + i + 1;
return s + chunk.slice( 0, i );
}
return false;
},
/* minimal header reading. modify if you want to parse more information */
RGBE_ReadHeader = function ( buffer ) {
// regexes to parse header info fields
const magic_token_re = /^#\?(\S+)/,
gamma_re = /^\s*GAMMA\s*=\s*(\d+(\.\d+)?)\s*$/,
exposure_re = /^\s*EXPOSURE\s*=\s*(\d+(\.\d+)?)\s*$/,
format_re = /^\s*FORMAT=(\S+)\s*$/,
dimensions_re = /^\s*\-Y\s+(\d+)\s+\+X\s+(\d+)\s*$/,
// RGBE format header struct
header = {
valid: 0, /* indicate which fields are valid */
string: '', /* the actual header string */
comments: '', /* comments found in header */
programtype: 'RGBE', /* listed at beginning of file to identify it after "#?". defaults to "RGBE" */
format: '', /* RGBE format, default 32-bit_rle_rgbe */
gamma: 1.0, /* image has already been gamma corrected with given gamma. defaults to 1.0 (no correction) */
exposure: 1.0, /* a value of 1.0 in an image corresponds to <exposure> watts/steradian/m^2. defaults to 1.0 */
width: 0, height: 0 /* image dimensions, width/height */
};
let line, match;
if ( buffer.pos >= buffer.byteLength || ! ( line = fgets( buffer ) ) ) {
rgbe_error( rgbe_read_error, 'no header found' );
}
/* if you want to require the magic token then uncomment the next line */
if ( ! ( match = line.match( magic_token_re ) ) ) {
rgbe_error( rgbe_format_error, 'bad initial token' );
}
header.valid |= RGBE_VALID_PROGRAMTYPE;
header.programtype = match[ 1 ];
header.string += line + '\n';
while ( true ) {
line = fgets( buffer );
if ( false === line ) break;
header.string += line + '\n';
if ( '#' === line.charAt( 0 ) ) {
header.comments += line + '\n';
continue; // comment line
}
if ( match = line.match( gamma_re ) ) {
header.gamma = parseFloat( match[ 1 ] );
}
if ( match = line.match( exposure_re ) ) {
header.exposure = parseFloat( match[ 1 ] );
}
if ( match = line.match( format_re ) ) {
header.valid |= RGBE_VALID_FORMAT;
header.format = match[ 1 ];//'32-bit_rle_rgbe';
}
if ( match = line.match( dimensions_re ) ) {
header.valid |= RGBE_VALID_DIMENSIONS;
header.height = parseInt( match[ 1 ], 10 );
header.width = parseInt( match[ 2 ], 10 );
}
if ( ( header.valid & RGBE_VALID_FORMAT ) && ( header.valid & RGBE_VALID_DIMENSIONS ) ) break;
}
if ( ! ( header.valid & RGBE_VALID_FORMAT ) ) {
rgbe_error( rgbe_format_error, 'missing format specifier' );
}
if ( ! ( header.valid & RGBE_VALID_DIMENSIONS ) ) {
rgbe_error( rgbe_format_error, 'missing image size specifier' );
}
return header;
},
RGBE_ReadPixels_RLE = function ( buffer, w, h ) {
const scanline_width = w;
if (
// run length encoding is not allowed so read flat
( ( scanline_width < 8 ) || ( scanline_width > 0x7fff ) ) ||
// this file is not run length encoded
( ( 2 !== buffer[ 0 ] ) || ( 2 !== buffer[ 1 ] ) || ( buffer[ 2 ] & 0x80 ) )
) {
// return the flat buffer
return new Uint8Array( buffer );
}
if ( scanline_width !== ( ( buffer[ 2 ] << 8 ) | buffer[ 3 ] ) ) {
rgbe_error( rgbe_format_error, 'wrong scanline width' );
}
const data_rgba = new Uint8Array( 4 * w * h );
if ( ! data_rgba.length ) {
rgbe_error( rgbe_memory_error, 'unable to allocate buffer space' );
}
let offset = 0, pos = 0;
const ptr_end = 4 * scanline_width;
const rgbeStart = new Uint8Array( 4 );
const scanline_buffer = new Uint8Array( ptr_end );
let num_scanlines = h;
// read in each successive scanline
while ( ( num_scanlines > 0 ) && ( pos < buffer.byteLength ) ) {
if ( pos + 4 > buffer.byteLength ) {
rgbe_error( rgbe_read_error );
}
rgbeStart[ 0 ] = buffer[ pos ++ ];
rgbeStart[ 1 ] = buffer[ pos ++ ];
rgbeStart[ 2 ] = buffer[ pos ++ ];
rgbeStart[ 3 ] = buffer[ pos ++ ];
if ( ( 2 != rgbeStart[ 0 ] ) || ( 2 != rgbeStart[ 1 ] ) || ( ( ( rgbeStart[ 2 ] << 8 ) | rgbeStart[ 3 ] ) != scanline_width ) ) {
rgbe_error( rgbe_format_error, 'bad rgbe scanline format' );
}
// read each of the four channels for the scanline into the buffer
// first red, then green, then blue, then exponent
let ptr = 0, count;
while ( ( ptr < ptr_end ) && ( pos < buffer.byteLength ) ) {
count = buffer[ pos ++ ];
const isEncodedRun = count > 128;
if ( isEncodedRun ) count -= 128;
if ( ( 0 === count ) || ( ptr + count > ptr_end ) ) {
rgbe_error( rgbe_format_error, 'bad scanline data' );
}
if ( isEncodedRun ) {
// a (encoded) run of the same value
const byteValue = buffer[ pos ++ ];
for ( let i = 0; i < count; i ++ ) {
scanline_buffer[ ptr ++ ] = byteValue;
}
//ptr += count;
} else {
// a literal-run
scanline_buffer.set( buffer.subarray( pos, pos + count ), ptr );
ptr += count; pos += count;
}
}
// now convert data from buffer into rgba
// first red, then green, then blue, then exponent (alpha)
const l = scanline_width; //scanline_buffer.byteLength;
for ( let i = 0; i < l; i ++ ) {
let off = 0;
data_rgba[ offset ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 1 ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 2 ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 3 ] = scanline_buffer[ i + off ];
offset += 4;
}
num_scanlines --;
}
return data_rgba;
};
const RGBEByteToRGBFloat = function ( sourceArray, sourceOffset, destArray, destOffset ) {
const e = sourceArray[ sourceOffset + 3 ];
const scale = Math.pow( 2.0, e - 128.0 ) / 255.0;
destArray[ destOffset + 0 ] = sourceArray[ sourceOffset + 0 ] * scale;
destArray[ destOffset + 1 ] = sourceArray[ sourceOffset + 1 ] * scale;
destArray[ destOffset + 2 ] = sourceArray[ sourceOffset + 2 ] * scale;
destArray[ destOffset + 3 ] = 1;
};
const RGBEByteToRGBHalf = function ( sourceArray, sourceOffset, destArray, destOffset ) {
const e = sourceArray[ sourceOffset + 3 ];
const scale = Math.pow( 2.0, e - 128.0 ) / 255.0;
// clamping to 65504, the maximum representable value in float16
destArray[ destOffset + 0 ] = DataUtils.toHalfFloat( Math.min( sourceArray[ sourceOffset + 0 ] * scale, 65504 ) );
destArray[ destOffset + 1 ] = DataUtils.toHalfFloat( Math.min( sourceArray[ sourceOffset + 1 ] * scale, 65504 ) );
destArray[ destOffset + 2 ] = DataUtils.toHalfFloat( Math.min( sourceArray[ sourceOffset + 2 ] * scale, 65504 ) );
destArray[ destOffset + 3 ] = DataUtils.toHalfFloat( 1 );
};
const byteArray = new Uint8Array( buffer );
byteArray.pos = 0;
const rgbe_header_info = RGBE_ReadHeader( byteArray );
const w = rgbe_header_info.width,
h = rgbe_header_info.height,
image_rgba_data = RGBE_ReadPixels_RLE( byteArray.subarray( byteArray.pos ), w, h );
let data, type;
let numElements;
switch ( this.type ) {
case FloatType:
numElements = image_rgba_data.length / 4;
const floatArray = new Float32Array( numElements * 4 );
for ( let j = 0; j < numElements; j ++ ) {
RGBEByteToRGBFloat( image_rgba_data, j * 4, floatArray, j * 4 );
}
data = floatArray;
type = FloatType;
break;
case HalfFloatType:
numElements = image_rgba_data.length / 4;
const halfArray = new Uint16Array( numElements * 4 );
for ( let j = 0; j < numElements; j ++ ) {
RGBEByteToRGBHalf( image_rgba_data, j * 4, halfArray, j * 4 );
}
data = halfArray;
type = HalfFloatType;
break;
default:
throw new Error( 'THREE.HDRLoader: Unsupported type: ' + this.type );
}
return {
width: w, height: h,
data: data,
header: rgbe_header_info.string,
gamma: rgbe_header_info.gamma,
exposure: rgbe_header_info.exposure,
type: type
};
}
/**
* Sets the texture type.
*
* @param {(HalfFloatType|FloatType)} value - The texture type to set.
* @return {HDRLoader} A reference to this loader.
*/
setDataType( value ) {
this.type = value;
return this;
}
load( url, onLoad, onProgress, onError ) {
function onLoadCallback( texture, texData ) {
switch ( texture.type ) {
case FloatType:
case HalfFloatType:
texture.colorSpace = LinearSRGBColorSpace;
texture.minFilter = LinearFilter;
texture.magFilter = LinearFilter;
texture.generateMipmaps = false;
texture.flipY = true;
break;
}
if ( onLoad ) onLoad( texture, texData );
}
return super.load( url, onLoadCallback, onProgress, onError );
}
}
export { HDRLoader };
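/*
 * Decoding sketch (not part of the upstream file): the RGBE-to-float rule applied by
 * RGBEByteToRGBFloat above, shown on a single made-up pixel.
 */
const rgbePixel = new Uint8Array( [ 128, 64, 32, 136 ] ); // r, g, b, shared exponent
const linearRGBA = new Float32Array( 4 );

// color = byte * 2^( e - 128 ) / 255, alpha forced to 1
const rgbeScale = Math.pow( 2.0, rgbePixel[ 3 ] - 128.0 ) / 255.0;
linearRGBA[ 0 ] = rgbePixel[ 0 ] * rgbeScale;
linearRGBA[ 1 ] = rgbePixel[ 1 ] * rgbeScale;
linearRGBA[ 2 ] = rgbePixel[ 2 ] * rgbeScale;
linearRGBA[ 3 ] = 1;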

379
node_modules/three/examples/jsm/loaders/IESLoader.js generated vendored Normal file
View File

@@ -0,0 +1,379 @@
import {
DataTexture,
FileLoader,
FloatType,
RedFormat,
MathUtils,
Loader,
UnsignedByteType,
LinearFilter,
HalfFloatType,
DataUtils
} from 'three';
/**
* A loader for the IES format.
*
* The loaded texture should be assigned to {@link IESSpotLight#iesMap}.
*
* ```js
* const loader = new IESLoader();
* const texture = await loader.loadAsync( 'ies/007cfb11e343e2f42e3b476be4ab684e.ies' );
*
* const spotLight = new THREE.IESSpotLight( 0xff0000, 500 );
* spotLight.iesMap = texture;
* ```
*
* @augments Loader
* @three_import import { IESLoader } from 'three/addons/loaders/IESLoader.js';
*/
class IESLoader extends Loader {
/**
* Constructs a new IES loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
/**
* The texture type.
*
* @type {(HalfFloatType|FloatType)}
* @default HalfFloatType
*/
this.type = HalfFloatType;
}
_getIESValues( iesLamp, type ) {
const width = 360;
const height = 180;
const size = width * height;
const data = new Array( size );
function interpolateCandelaValues( phi, theta ) {
let phiIndex = 0, thetaIndex = 0;
let startTheta = 0, endTheta = 0, startPhi = 0, endPhi = 0;
for ( let i = 0; i < iesLamp.numHorAngles - 1; ++ i ) { // numHorAngles = horAngles.length-1 because of extra padding, so this won't cause an out of bounds error
if ( theta < iesLamp.horAngles[ i + 1 ] || i == iesLamp.numHorAngles - 2 ) {
thetaIndex = i;
startTheta = iesLamp.horAngles[ i ];
endTheta = iesLamp.horAngles[ i + 1 ];
break;
}
}
for ( let i = 0; i < iesLamp.numVerAngles - 1; ++ i ) {
if ( phi < iesLamp.verAngles[ i + 1 ] || i == iesLamp.numVerAngles - 2 ) {
phiIndex = i;
startPhi = iesLamp.verAngles[ i ];
endPhi = iesLamp.verAngles[ i + 1 ];
break;
}
}
const deltaTheta = endTheta - startTheta;
const deltaPhi = endPhi - startPhi;
if ( deltaPhi === 0 ) // Outside range
return 0;
const t1 = deltaTheta === 0 ? 0 : ( theta - startTheta ) / deltaTheta;
const t2 = ( phi - startPhi ) / deltaPhi;
const nextThetaIndex = deltaTheta === 0 ? thetaIndex : thetaIndex + 1;
const v1 = MathUtils.lerp( iesLamp.candelaValues[ thetaIndex ][ phiIndex ], iesLamp.candelaValues[ nextThetaIndex ][ phiIndex ], t1 );
const v2 = MathUtils.lerp( iesLamp.candelaValues[ thetaIndex ][ phiIndex + 1 ], iesLamp.candelaValues[ nextThetaIndex ][ phiIndex + 1 ], t1 );
const v = MathUtils.lerp( v1, v2, t2 );
return v;
}
const startTheta = iesLamp.horAngles[ 0 ], endTheta = iesLamp.horAngles[ iesLamp.numHorAngles - 1 ];
for ( let i = 0; i < size; ++ i ) {
let theta = i % width;
const phi = Math.floor( i / width );
if ( endTheta - startTheta !== 0 && ( theta < startTheta || theta >= endTheta ) ) { // Handle symmetry for hor angles
theta %= endTheta * 2;
if ( theta > endTheta )
theta = endTheta * 2 - theta;
}
data[ phi + theta * height ] = interpolateCandelaValues( phi, theta );
}
let result = null;
if ( type === UnsignedByteType ) result = Uint8Array.from( data.map( v => Math.min( v * 0xFF, 0xFF ) ) );
else if ( type === HalfFloatType ) result = Uint16Array.from( data.map( v => DataUtils.toHalfFloat( v ) ) );
else if ( type === FloatType ) result = Float32Array.from( data );
else console.error( 'IESLoader: Unsupported type:', type );
return result;
}
/**
* Starts loading from the given URL and passes the loaded IES texture
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(DataTexture)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const loader = new FileLoader( this.manager );
loader.setResponseType( 'text' );
loader.setCrossOrigin( this.crossOrigin );
loader.setWithCredentials( this.withCredentials );
loader.setPath( this.path );
loader.setRequestHeader( this.requestHeader );
loader.load( url, text => {
onLoad( this.parse( text ) );
}, onProgress, onError );
}
/**
* Parses the given IES data.
*
* @param {string} text - The raw IES data.
* @return {DataTexture} The IES data as a texture.
*/
parse( text ) {
const type = this.type;
const iesLamp = new IESLamp( text );
const data = this._getIESValues( iesLamp, type );
const texture = new DataTexture( data, 180, 1, RedFormat, type );
texture.minFilter = LinearFilter;
texture.magFilter = LinearFilter;
texture.needsUpdate = true;
return texture;
}
}
function IESLamp( text ) {
const _self = this;
const textArray = text.split( '\n' );
let lineNumber = 0;
let line;
_self.verAngles = [ ];
_self.horAngles = [ ];
_self.candelaValues = [ ];
_self.tiltData = { };
_self.tiltData.angles = [ ];
_self.tiltData.mulFactors = [ ];
function textToArray( text ) {
text = text.replace( /^\s+|\s+$/g, '' ); // remove leading or trailing spaces
text = text.replace( /,/g, ' ' ); // replace commas with spaces
text = text.replace( /\s\s+/g, ' ' ); // replace white space/tabs etc by single whitespace
const array = text.split( ' ' );
return array;
}
function readArray( count, array ) {
while ( true ) {
const line = textArray[ lineNumber ++ ];
const lineData = textToArray( line );
for ( let i = 0; i < lineData.length; ++ i ) {
array.push( Number( lineData[ i ] ) );
}
if ( array.length === count )
break;
}
}
function readTilt() {
let line = textArray[ lineNumber ++ ];
let lineData = textToArray( line );
_self.tiltData.lampToLumGeometry = Number( lineData[ 0 ] );
line = textArray[ lineNumber ++ ];
lineData = textToArray( line );
_self.tiltData.numAngles = Number( lineData[ 0 ] );
readArray( _self.tiltData.numAngles, _self.tiltData.angles );
readArray( _self.tiltData.numAngles, _self.tiltData.mulFactors );
}
function readLampValues() {
const values = [ ];
readArray( 10, values );
_self.count = Number( values[ 0 ] );
_self.lumens = Number( values[ 1 ] );
_self.multiplier = Number( values[ 2 ] );
_self.numVerAngles = Number( values[ 3 ] );
_self.numHorAngles = Number( values[ 4 ] );
_self.gonioType = Number( values[ 5 ] );
_self.units = Number( values[ 6 ] );
_self.width = Number( values[ 7 ] );
_self.length = Number( values[ 8 ] );
_self.height = Number( values[ 9 ] );
}
function readLampFactors() {
const values = [ ];
readArray( 3, values );
_self.ballFactor = Number( values[ 0 ] );
_self.blpFactor = Number( values[ 1 ] );
_self.inputWatts = Number( values[ 2 ] );
}
while ( true ) {
line = textArray[ lineNumber ++ ];
if ( line.includes( 'TILT' ) ) {
break;
}
}
if ( ! line.includes( 'NONE' ) ) {
if ( line.includes( 'INCLUDE' ) ) {
readTilt();
} else {
// TODO: Read tilt data from a file
}
}
readLampValues();
readLampFactors();
// Initialize candela value array
for ( let i = 0; i < _self.numHorAngles; ++ i ) {
_self.candelaValues.push( [ ] );
}
// Parse Angles
readArray( _self.numVerAngles, _self.verAngles );
readArray( _self.numHorAngles, _self.horAngles );
// Parse Candela values
for ( let i = 0; i < _self.numHorAngles; ++ i ) {
readArray( _self.numVerAngles, _self.candelaValues[ i ] );
}
// Calculate actual candela values, and normalize.
for ( let i = 0; i < _self.numHorAngles; ++ i ) {
for ( let j = 0; j < _self.numVerAngles; ++ j ) {
_self.candelaValues[ i ][ j ] *= _self.candelaValues[ i ][ j ] * _self.multiplier
* _self.ballFactor * _self.blpFactor;
}
}
let maxVal = - 1;
for ( let i = 0; i < _self.numHorAngles; ++ i ) {
for ( let j = 0; j < _self.numVerAngles; ++ j ) {
const value = _self.candelaValues[ i ][ j ];
maxVal = maxVal < value ? value : maxVal;
}
}
const bNormalize = true;
if ( bNormalize && maxVal > 0 ) {
for ( let i = 0; i < _self.numHorAngles; ++ i ) {
for ( let j = 0; j < _self.numVerAngles; ++ j ) {
_self.candelaValues[ i ][ j ] /= maxVal;
}
}
}
}
export { IESLoader };
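/*
 * Usage sketch (not part of the upstream file): the parsed profile is meant for
 * IESSpotLight from the addons. The .ies path and light parameters are assumptions.
 */
import { IESSpotLight } from '../lights/IESSpotLight.js';

const demoIESLoader = new IESLoader();
demoIESLoader.load( 'ies/007cfb11e343e2f42e3b476be4ab684e.ies', function ( iesTexture ) {

	const spotLight = new IESSpotLight( 0xffffff, 100 );
	spotLight.iesMap = iesTexture; // the 180x1 RedFormat profile built in parse()

} );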

163
node_modules/three/examples/jsm/loaders/KMZLoader.js generated vendored Normal file
View File

@@ -0,0 +1,163 @@
import {
FileLoader,
Group,
Loader,
LoadingManager
} from 'three';
import { ColladaLoader } from '../loaders/ColladaLoader.js';
import * as fflate from '../libs/fflate.module.js';
/**
* A loader for the KMZ format.
*
* ```js
* const loader = new KMZLoader();
* const kmz = await loader.loadAsync( './models/kmz/Box.kmz' );
*
* scene.add( kmz.scene );
* ```
*
* @augments Loader
* @three_import import { KMZLoader } from 'three/addons/loaders/KMZLoader.js';
*/
class KMZLoader extends Loader {
/**
* Constructs a new KMZ loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
}
/**
* Starts loading from the given URL and passes the loaded KMZ asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function({scene:Group})} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given KMZ data and returns an object holding the scene.
*
* @param {ArrayBuffer} data - The raw KMZ data as an array buffer.
* @return {{scene:Group}} The parsed KMZ asset.
*/
parse( data ) {
function findFile( url ) {
for ( const path in zip ) {
if ( path.slice( - url.length ) === url ) {
return zip[ path ];
}
}
}
const manager = new LoadingManager();
manager.setURLModifier( function ( url ) {
const image = findFile( url );
if ( image ) {
console.log( 'Loading', url );
const blob = new Blob( [ image.buffer ], { type: 'application/octet-stream' } );
return URL.createObjectURL( blob );
}
return url;
} );
//
const zip = fflate.unzipSync( new Uint8Array( data ) );
if ( zip[ 'doc.kml' ] ) {
const xml = new DOMParser().parseFromString( fflate.strFromU8( zip[ 'doc.kml' ] ), 'application/xml' );
const model = xml.querySelector( 'Placemark Model Link href' );
if ( model ) {
const loader = new ColladaLoader( manager );
return loader.parse( fflate.strFromU8( zip[ model.textContent ] ) );
}
} else {
console.warn( 'KMZLoader: Missing doc.kml file.' );
for ( const path in zip ) {
const extension = path.split( '.' ).pop().toLowerCase();
if ( extension === 'dae' ) {
const loader = new ColladaLoader( manager );
return loader.parse( fflate.strFromU8( zip[ path ] ) );
}
}
}
console.error( 'KMZLoader: Couldn\'t find .dae file.' );
return { scene: new Group() };
}
}
export { KMZLoader };
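/*
 * Usage sketch (not part of the upstream file): when doc.kml is missing, parse()
 * above falls back to the first .dae entry. The .kmz path is an assumption.
 */
const demoKMZLoader = new KMZLoader();
demoKMZLoader.load( './models/kmz/Box.kmz', function ( kmz ) {

	// kmz.scene is the Group produced by ColladaLoader (or an empty Group on failure)
	console.log( 'KMZ scene children:', kmz.scene.children.length );

} );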

1228
node_modules/three/examples/jsm/loaders/KTX2Loader.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

197
node_modules/three/examples/jsm/loaders/KTXLoader.js generated vendored Normal file
View File

@@ -0,0 +1,197 @@
import {
CompressedTextureLoader
} from 'three';
/**
* A loader for the KTX texture compression format.
*
* References:
* - [The KTX File Format and Tools]{@link https://www.khronos.org/opengles/sdk/tools/KTX/}
* - [Babylon.JS khronosTextureContainer.ts]{@link https://github.com/BabylonJS/Babylon.js/blob/master/src/Misc/khronosTextureContainer.ts}
*
* ```js
* const loader = new KTXLoader();
*
* const map = loader.load( 'textures/compressed/lensflare_ASTC8x8.ktx' );
* map.colorSpace = THREE.SRGBColorSpace; // only for color textures
* ```
*
* @augments CompressedTextureLoader
* @three_import import { KTXLoader } from 'three/addons/loaders/KTXLoader.js';
*/
class KTXLoader extends CompressedTextureLoader {
/**
* Constructs a new KTX loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
}
/**
* Parses the given KTX texture data.
*
* @param {ArrayBuffer} buffer - The raw texture data.
* @param {boolean} loadMipmaps - Whether to load mipmaps or not.
* @return {CompressedTextureLoader~TexData} An object representing the parsed texture data.
*/
parse( buffer, loadMipmaps ) {
const ktx = new KhronosTextureContainer( buffer, 1 );
return {
mipmaps: ktx.mipmaps( loadMipmaps ),
width: ktx.pixelWidth,
height: ktx.pixelHeight,
format: ktx.glInternalFormat,
isCubemap: ktx.numberOfFaces === 6,
mipmapCount: ktx.numberOfMipmapLevels
};
}
}
const HEADER_LEN = 12 + ( 13 * 4 ); // identifier + header elements (not including key value meta-data pairs)
// load types
const COMPRESSED_2D = 0; // uses a gl.compressedTexImage2D()
//const COMPRESSED_3D = 1; // uses a gl.compressedTexImage3D()
//const TEX_2D = 2; // uses a gl.texImage2D()
//const TEX_3D = 3; // uses a gl.texImage3D()
class KhronosTextureContainer {
/**
* @private
* @param {ArrayBuffer} arrayBuffer - contents of the KTX container file
* @param {number} facesExpected - should be either 1 or 6, based on whether the data is a cube texture or not
* @param {boolean} threeDExpected - provision for indicating that data should be a 3D texture, not implemented
* @param {boolean} textureArrayExpected - provision for indicating that data should be a texture array, not implemented
*/
constructor( arrayBuffer, facesExpected /*, threeDExpected, textureArrayExpected */ ) {
this.arrayBuffer = arrayBuffer;
// Test that it is a ktx formatted file, based on the first 12 bytes, character representation is:
// '«', 'K', 'T', 'X', ' ', '1', '1', '»', '\r', '\n', '\x1A', '\n'
// 0xAB, 0x4B, 0x54, 0x58, 0x20, 0x31, 0x31, 0xBB, 0x0D, 0x0A, 0x1A, 0x0A
const identifier = new Uint8Array( this.arrayBuffer, 0, 12 );
if ( identifier[ 0 ] !== 0xAB ||
identifier[ 1 ] !== 0x4B ||
identifier[ 2 ] !== 0x54 ||
identifier[ 3 ] !== 0x58 ||
identifier[ 4 ] !== 0x20 ||
identifier[ 5 ] !== 0x31 ||
identifier[ 6 ] !== 0x31 ||
identifier[ 7 ] !== 0xBB ||
identifier[ 8 ] !== 0x0D ||
identifier[ 9 ] !== 0x0A ||
identifier[ 10 ] !== 0x1A ||
identifier[ 11 ] !== 0x0A ) {
console.error( 'texture missing KTX identifier' );
return;
}
// load the rest of the header as native 32 bit uints
const dataSize = Uint32Array.BYTES_PER_ELEMENT;
const headerDataView = new DataView( this.arrayBuffer, 12, 13 * dataSize );
const endianness = headerDataView.getUint32( 0, true );
const littleEndian = endianness === 0x04030201;
this.glType = headerDataView.getUint32( 1 * dataSize, littleEndian ); // must be 0 for compressed textures
this.glTypeSize = headerDataView.getUint32( 2 * dataSize, littleEndian ); // must be 1 for compressed textures
this.glFormat = headerDataView.getUint32( 3 * dataSize, littleEndian ); // must be 0 for compressed textures
this.glInternalFormat = headerDataView.getUint32( 4 * dataSize, littleEndian ); // the value of arg passed to gl.compressedTexImage2D(,,x,,,,)
this.glBaseInternalFormat = headerDataView.getUint32( 5 * dataSize, littleEndian ); // specify GL_RGB, GL_RGBA, GL_ALPHA, etc (un-compressed only)
this.pixelWidth = headerDataView.getUint32( 6 * dataSize, littleEndian ); // level 0 value of arg passed to gl.compressedTexImage2D(,,,x,,,)
this.pixelHeight = headerDataView.getUint32( 7 * dataSize, littleEndian ); // level 0 value of arg passed to gl.compressedTexImage2D(,,,,x,,)
this.pixelDepth = headerDataView.getUint32( 8 * dataSize, littleEndian ); // level 0 value of arg passed to gl.compressedTexImage3D(,,,,,x,,)
this.numberOfArrayElements = headerDataView.getUint32( 9 * dataSize, littleEndian ); // used for texture arrays
this.numberOfFaces = headerDataView.getUint32( 10 * dataSize, littleEndian ); // used for cubemap textures, should either be 1 or 6
this.numberOfMipmapLevels = headerDataView.getUint32( 11 * dataSize, littleEndian ); // number of levels; disregard possibility of 0 for compressed textures
this.bytesOfKeyValueData = headerDataView.getUint32( 12 * dataSize, littleEndian ); // the amount of space after the header for meta-data
// Make sure we have a compressed type. Not only reduces work, but probably better to let dev know they are not compressing.
if ( this.glType !== 0 ) {
console.warn( 'only compressed formats currently supported' );
return;
} else {
// value of zero is an indication to generate mipmaps @ runtime. Not usually allowed for compressed, so disregard.
this.numberOfMipmapLevels = Math.max( 1, this.numberOfMipmapLevels );
}
if ( this.pixelHeight === 0 || this.pixelDepth !== 0 ) {
console.warn( 'only 2D textures currently supported' );
return;
}
if ( this.numberOfArrayElements !== 0 ) {
console.warn( 'texture arrays not currently supported' );
return;
}
if ( this.numberOfFaces !== facesExpected ) {
console.warn( 'number of faces expected ' + facesExpected + ', but found ' + this.numberOfFaces );
return;
}
// we now have a completely validated file, so could use existence of loadType as success
// would need to make this more elaborate & adjust checks above to support more than one load type
this.loadType = COMPRESSED_2D;
}
mipmaps( loadMipmaps ) {
const mipmaps = [];
// initialize width & height for level 1
let dataOffset = HEADER_LEN + this.bytesOfKeyValueData;
let width = this.pixelWidth;
let height = this.pixelHeight;
const mipmapCount = loadMipmaps ? this.numberOfMipmapLevels : 1;
for ( let level = 0; level < mipmapCount; level ++ ) {
const imageSize = new Int32Array( this.arrayBuffer, dataOffset, 1 )[ 0 ]; // size per face, since not supporting array cubemaps
dataOffset += 4; // size of the image + 4 for the imageSize field
for ( let face = 0; face < this.numberOfFaces; face ++ ) {
const byteArray = new Uint8Array( this.arrayBuffer, dataOffset, imageSize );
mipmaps.push( { 'data': byteArray, 'width': width, 'height': height } );
dataOffset += imageSize;
dataOffset += 3 - ( ( imageSize + 3 ) % 4 ); // add padding for odd sized image
}
width = Math.max( 1.0, width * 0.5 );
height = Math.max( 1.0, height * 0.5 );
}
return mipmaps;
}
}
export { KTXLoader };
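/*
 * Usage sketch (not part of the upstream file): the loader yields a CompressedTexture
 * whose mip levels were sliced out by KhronosTextureContainer.mipmaps(). The path is
 * an assumption, and the GPU must support the contained compressed format.
 */
const demoKTXLoader = new KTXLoader();
demoKTXLoader.load( 'textures/compressed/lensflare_ASTC8x8.ktx', function ( map ) {

	console.log( 'glInternalFormat:', map.format, 'mip levels:', map.mipmaps.length );

} );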

2519
node_modules/three/examples/jsm/loaders/LDrawLoader.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

205
node_modules/three/examples/jsm/loaders/LUT3dlLoader.js generated vendored Normal file
View File

@@ -0,0 +1,205 @@
import {
ClampToEdgeWrapping,
Data3DTexture,
FileLoader,
LinearFilter,
Loader,
RGBAFormat,
UnsignedByteType,
} from 'three';
/**
* A loader for the 3DL LUT format.
*
* References:
* - [3D LUTs]{@link http://download.autodesk.com/us/systemdocs/help/2011/lustre/index.html?url=./files/WSc4e151a45a3b785a24c3d9a411df9298473-7ffd.htm,topicNumber=d0e9492}
* - [Format Spec for .3dl]{@link https://community.foundry.com/discuss/topic/103636/format-spec-for-3dl?mode=Post&postID=895258}
*
* ```js
* const loader = new LUT3dlLoader();
* const map = await loader.loadAsync( 'luts/Presetpro-Cinematic.3dl' );
* ```
*
* @augments Loader
* @three_import import { LUT3dlLoader } from 'three/addons/loaders/LUT3dlLoader.js';
*/
export class LUT3dlLoader extends Loader {
/**
* Constructs a new 3DL LUT loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
/**
* The texture type.
*
* @type {(UnsignedByteType|FloatType)}
* @default UnsignedByteType
*/
this.type = UnsignedByteType;
}
/**
* Sets the texture type.
*
* @param {(UnsignedByteType|FloatType)} type - The texture type to set.
* @return {LUT3dlLoader} A reference to this loader.
*/
setType( type ) {
this.type = type;
return this;
}
/**
* Starts loading from the given URL and passes the loaded 3DL LUT asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function({size:number,texture3D:Data3DTexture})} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setResponseType( 'text' );
loader.load( url, text => {
try {
onLoad( this.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
this.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given 3DL LUT data and returns the resulting 3D data texture.
*
* @param {string} input - The raw 3DL LUT data as a string.
* @return {{size:number,texture3D:Data3DTexture}} The parsed 3DL LUT.
*/
parse( input ) {
const regExpGridInfo = /^[\d ]+$/m;
const regExpDataPoints = /^([\d.e+-]+) +([\d.e+-]+) +([\d.e+-]+) *$/gm;
// The first line describes the positions of values on the LUT grid.
let result = regExpGridInfo.exec( input );
if ( result === null ) {
throw new Error( 'LUT3dlLoader: Missing grid information' );
}
const gridLines = result[ 0 ].trim().split( /\s+/g ).map( Number );
const gridStep = gridLines[ 1 ] - gridLines[ 0 ];
const size = gridLines.length;
const sizeSq = size ** 2;
for ( let i = 1, l = gridLines.length; i < l; ++ i ) {
if ( gridStep !== ( gridLines[ i ] - gridLines[ i - 1 ] ) ) {
throw new Error( 'LUT3dlLoader: Inconsistent grid size' );
}
}
const dataFloat = new Float32Array( size ** 3 * 4 );
let maxValue = 0.0;
let index = 0;
while ( ( result = regExpDataPoints.exec( input ) ) !== null ) {
const r = Number( result[ 1 ] );
const g = Number( result[ 2 ] );
const b = Number( result[ 3 ] );
maxValue = Math.max( maxValue, r, g, b );
const bLayer = index % size;
const gLayer = Math.floor( index / size ) % size;
const rLayer = Math.floor( index / ( sizeSq ) ) % size;
// b grows first, then g, then r.
const d4 = ( bLayer * sizeSq + gLayer * size + rLayer ) * 4;
dataFloat[ d4 + 0 ] = r;
dataFloat[ d4 + 1 ] = g;
dataFloat[ d4 + 2 ] = b;
++ index;
}
// Determine the bit depth to scale the values to [0.0, 1.0].
const bits = Math.ceil( Math.log2( maxValue ) );
const maxBitValue = Math.pow( 2, bits );
const data = this.type === UnsignedByteType ? new Uint8Array( dataFloat.length ) : dataFloat;
const scale = this.type === UnsignedByteType ? 255 : 1;
for ( let i = 0, l = data.length; i < l; i += 4 ) {
const i1 = i + 1;
const i2 = i + 2;
const i3 = i + 3;
// Note: data is dataFloat when type is FloatType.
data[ i ] = dataFloat[ i ] / maxBitValue * scale;
data[ i1 ] = dataFloat[ i1 ] / maxBitValue * scale;
data[ i2 ] = dataFloat[ i2 ] / maxBitValue * scale;
data[ i3 ] = scale;
}
const texture3D = new Data3DTexture();
texture3D.image.data = data;
texture3D.image.width = size;
texture3D.image.height = size;
texture3D.image.depth = size;
texture3D.format = RGBAFormat;
texture3D.type = this.type;
texture3D.magFilter = LinearFilter;
texture3D.minFilter = LinearFilter;
texture3D.wrapS = ClampToEdgeWrapping;
texture3D.wrapT = ClampToEdgeWrapping;
texture3D.wrapR = ClampToEdgeWrapping;
texture3D.generateMipmaps = false;
texture3D.needsUpdate = true;
return {
size,
texture3D,
};
}
}
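/*
 * Usage sketch (not part of the upstream file): the result wraps a Data3DTexture
 * suitable for 3D color grading. The .3dl path is an assumption for illustration.
 */
const demoLUT3dlLoader = new LUT3dlLoader();
demoLUT3dlLoader.load( 'luts/Presetpro-Cinematic.3dl', function ( lut ) {

	// lut.size is the grid resolution; lut.texture3D holds the RGBA volume built in parse()
	console.log( 'LUT grid size:', lut.size, 'texture width:', lut.texture3D.image.width );

} );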

190
node_modules/three/examples/jsm/loaders/LUTCubeLoader.js generated vendored Normal file
View File

@@ -0,0 +1,190 @@
import {
ClampToEdgeWrapping,
Data3DTexture,
FileLoader,
LinearFilter,
Loader,
UnsignedByteType,
Vector3,
} from 'three';
/**
* A loader for the Cube LUT format.
*
* References:
* - [Cube LUT Specification]{@link https://web.archive.org/web/20220220033515/https://wwwimages2.adobe.com/content/dam/acom/en/products/speedgrade/cc/pdfs/cube-lut-specification-1.0.pdf}
*
* ```js
* const loader = new LUTCubeLoader();
* const map = await loader.loadAsync( 'luts/Bourbon 64.CUBE' );
* ```
*
* @augments Loader
* @three_import import { LUTCubeLoader } from 'three/addons/loaders/LUTCubeLoader.js';
*/
export class LUTCubeLoader extends Loader {
/**
* Constructs a new Cube LUT loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
/**
* The texture type.
*
* @type {(UnsignedByteType|FloatType)}
* @default UnsignedByteType
*/
this.type = UnsignedByteType;
}
/**
* Sets the texture type.
*
* @param {(UnsignedByteType|FloatType)} type - The texture type to set.
* @return {LUTCubeLoader} A reference to this loader.
*/
setType( type ) {
this.type = type;
return this;
}
/**
* Starts loading from the given URL and passes the loaded Cube LUT asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function({title:string,size:number,domainMin:Vector3,domainMax:Vector3,texture3D:Data3DTexture})} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setResponseType( 'text' );
loader.load( url, text => {
try {
onLoad( this.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
this.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given Cube LUT data and returns the resulting 3D data texture.
*
* @param {string} input - The raw Cube LUT data as a string.
* @return {{title:string,size:number,domainMin:Vector3,domainMax:Vector3,texture3D:Data3DTexture}} The parsed Cube LUT.
*/
parse( input ) {
const regExpTitle = /TITLE +"([^"]*)"/;
const regExpSize = /LUT_3D_SIZE +(\d+)/;
const regExpDomainMin = /DOMAIN_MIN +([\d.]+) +([\d.]+) +([\d.]+)/;
const regExpDomainMax = /DOMAIN_MAX +([\d.]+) +([\d.]+) +([\d.]+)/;
const regExpDataPoints = /^([\d.e+-]+) +([\d.e+-]+) +([\d.e+-]+) *$/gm;
let result = regExpTitle.exec( input );
const title = ( result !== null ) ? result[ 1 ] : null;
result = regExpSize.exec( input );
if ( result === null ) {
throw new Error( 'LUTCubeLoader: Missing LUT_3D_SIZE information' );
}
const size = Number( result[ 1 ] );
const length = size ** 3 * 4;
const data = this.type === UnsignedByteType ? new Uint8Array( length ) : new Float32Array( length );
const domainMin = new Vector3( 0, 0, 0 );
const domainMax = new Vector3( 1, 1, 1 );
result = regExpDomainMin.exec( input );
if ( result !== null ) {
domainMin.set( Number( result[ 1 ] ), Number( result[ 2 ] ), Number( result[ 3 ] ) );
}
result = regExpDomainMax.exec( input );
if ( result !== null ) {
domainMax.set( Number( result[ 1 ] ), Number( result[ 2 ] ), Number( result[ 3 ] ) );
}
if ( domainMin.x > domainMax.x || domainMin.y > domainMax.y || domainMin.z > domainMax.z ) {
throw new Error( 'LUTCubeLoader: Invalid input domain' );
}
const scale = this.type === UnsignedByteType ? 255 : 1;
let i = 0;
while ( ( result = regExpDataPoints.exec( input ) ) !== null ) {
data[ i ++ ] = Number( result[ 1 ] ) * scale;
data[ i ++ ] = Number( result[ 2 ] ) * scale;
data[ i ++ ] = Number( result[ 3 ] ) * scale;
data[ i ++ ] = scale;
}
const texture3D = new Data3DTexture();
texture3D.image.data = data;
texture3D.image.width = size;
texture3D.image.height = size;
texture3D.image.depth = size;
texture3D.type = this.type;
texture3D.magFilter = LinearFilter;
texture3D.minFilter = LinearFilter;
texture3D.wrapS = ClampToEdgeWrapping;
texture3D.wrapT = ClampToEdgeWrapping;
texture3D.wrapR = ClampToEdgeWrapping;
texture3D.generateMipmaps = false;
texture3D.needsUpdate = true;
return {
title,
size,
domainMin,
domainMax,
texture3D,
};
}
}
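/*
 * Usage sketch (not part of the upstream file): .CUBE files may carry a TITLE and a
 * custom input domain, all surfaced by parse(). The file path is an assumption.
 */
const demoLUTCubeLoader = new LUTCubeLoader();
demoLUTCubeLoader.load( 'luts/Bourbon 64.CUBE', function ( lut ) {

	console.log( lut.title, lut.size, lut.domainMin.toArray(), lut.domainMax.toArray() );

} );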

190
node_modules/three/examples/jsm/loaders/LUTImageLoader.js generated vendored Normal file
View File

@@ -0,0 +1,190 @@
import {
Loader,
TextureLoader,
Data3DTexture,
RGBAFormat,
UnsignedByteType,
ClampToEdgeWrapping,
LinearFilter,
} from 'three';
/**
* A loader for loading LUT images.
*
* ```js
* const loader = new LUTImageLoader();
* const map = await loader.loadAsync( 'luts/NeutralLUT.png' );
* ```
*
* @augments Loader
* @three_import import { LUTImageLoader } from 'three/addons/loaders/LUTImageLoader.js';
*/
export class LUTImageLoader extends Loader {
/**
* Constructs a new LUT loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
/**
* Whether to vertically flip the LUT or not.
*
* Depending on the LUT's origin, the texture has green at the bottom (e.g. for Unreal)
* or green at the top (e.g. for Unity URP Color Lookup). If you're using lut image strips
* from a Unity pipeline, then set this property to `true`.
*
* @type {boolean}
* @default false
*/
this.flip = false;
}
/**
* Starts loading from the given URL and passes the loaded LUT
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function({size:number,texture3D:Data3DTexture})} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const loader = new TextureLoader( this.manager );
loader.setCrossOrigin( this.crossOrigin );
loader.setPath( this.path );
loader.load( url, texture => {
try {
let imageData;
if ( texture.image.width < texture.image.height ) {
imageData = this._getImageData( texture );
} else {
imageData = this._horz2Vert( texture );
}
onLoad( this.parse( imageData.data, Math.min( texture.image.width, texture.image.height ) ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
this.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given LUT data and returns the resulting 3D data texture.
*
* @param {Uint8ClampedArray} dataArray - The raw LUT data.
* @param {number} size - The LUT size.
* @return {{size:number,texture3D:Data3DTexture}} An object representing the parsed LUT.
*/
parse( dataArray, size ) {
const data = new Uint8Array( dataArray );
const texture3D = new Data3DTexture();
texture3D.image.data = data;
texture3D.image.width = size;
texture3D.image.height = size;
texture3D.image.depth = size;
texture3D.format = RGBAFormat;
texture3D.type = UnsignedByteType;
texture3D.magFilter = LinearFilter;
texture3D.minFilter = LinearFilter;
texture3D.wrapS = ClampToEdgeWrapping;
texture3D.wrapT = ClampToEdgeWrapping;
texture3D.wrapR = ClampToEdgeWrapping;
texture3D.generateMipmaps = false;
texture3D.needsUpdate = true;
return {
size,
texture3D,
};
}
// internal
_getImageData( texture ) {
const width = texture.image.width;
const height = texture.image.height;
const canvas = document.createElement( 'canvas' );
canvas.width = width;
canvas.height = height;
const context = canvas.getContext( '2d' );
if ( this.flip === true ) {
context.scale( 1, - 1 );
context.translate( 0, - height );
}
context.drawImage( texture.image, 0, 0 );
return context.getImageData( 0, 0, width, height );
}
_horz2Vert( texture ) {
const width = texture.image.height;
const height = texture.image.width;
const canvas = document.createElement( 'canvas' );
canvas.width = width;
canvas.height = height;
const context = canvas.getContext( '2d' );
if ( this.flip === true ) {
context.scale( 1, - 1 );
context.translate( 0, - height );
}
for ( let i = 0; i < width; i ++ ) {
const sy = i * width;
const dy = ( this.flip ) ? height - i * width : i * width;
context.drawImage( texture.image, sy, 0, width, width, 0, dy, width, width );
}
return context.getImageData( 0, 0, width, height );
}
}
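/*
 * Usage sketch (not part of the upstream file): strip-style LUT images are rearranged
 * into a Data3DTexture; enable `flip` for Unity-style strips. The path is an assumption.
 */
const demoLUTImageLoader = new LUTImageLoader();
demoLUTImageLoader.flip = false; // set to true for Unity URP Color Lookup strips
demoLUTImageLoader.load( 'luts/NeutralLUT.png', function ( lut ) {

	console.log( 'LUT size:', lut.size );

} );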

1079
node_modules/three/examples/jsm/loaders/LWOLoader.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

130
node_modules/three/examples/jsm/loaders/LottieLoader.js generated vendored Normal file
View File

@@ -0,0 +1,130 @@
import {
FileLoader,
Loader,
CanvasTexture,
NearestFilter,
SRGBColorSpace
} from 'three';
import lottie from '../libs/lottie_canvas.module.js';
/**
* A loader for the Lottie texture animation format.
*
* The loader returns an instance of {@link CanvasTexture} to represent
* the animated texture. Two additional properties are added to each texture:
* - `animation`: The return value of `lottie.loadAnimation()` which is an object
* with an API for controlling the animation's playback.
* - `image`: The image container.
*
* ```js
* const loader = new LottieLoader();
* loader.setQuality( 2 );
* const texture = await loader.loadAsync( 'textures/lottie/24017-lottie-logo-animation.json' );
*
* const geometry = new THREE.BoxGeometry();
* const material = new THREE.MeshBasicMaterial( { map: texture } );
* const mesh = new THREE.Mesh( geometry, material );
* scene.add( mesh );
* ```
*
* @augments Loader
* @three_import import { LottieLoader } from 'three/addons/loaders/LottieLoader.js';
*/
class LottieLoader extends Loader {
/**
* Constructs a new Lottie loader.
*
* @deprecated The loader has been deprecated and will be removed with r186. Use lottie-web instead and create your animated texture manually.
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
console.warn( 'THREE.LottieLoader: The loader has been deprecated and will be removed with r186. Use lottie-web instead and create your animated texture manually.' );
}
/**
* Sets the texture quality.
*
* @param {number} value - The texture quality.
*/
setQuality( value ) {
this._quality = value;
}
/**
* Starts loading from the given URL and passes the loaded Lottie asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(CanvasTexture)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
* @returns {CanvasTexture} The Lottie texture.
*/
load( url, onLoad, onProgress, onError ) {
const quality = this._quality || 1;
const texture = new CanvasTexture();
texture.minFilter = NearestFilter;
texture.generateMipmaps = false;
texture.colorSpace = SRGBColorSpace;
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setWithCredentials( this.withCredentials );
loader.load( url, function ( text ) {
const data = JSON.parse( text );
// lottie uses container.offsetWidth and offsetHeight
// to define width/height
const container = document.createElement( 'div' );
container.style.width = data.w + 'px';
container.style.height = data.h + 'px';
document.body.appendChild( container );
const animation = lottie.loadAnimation( {
container: container,
animType: 'canvas',
loop: true,
autoplay: true,
animationData: data,
rendererSettings: { dpr: quality }
} );
texture.animation = animation;
texture.image = animation.container;
animation.addEventListener( 'enterFrame', function () {
texture.needsUpdate = true;
} );
container.style.display = 'none';
if ( onLoad !== undefined ) {
onLoad( texture );
}
}, onProgress, onError );
return texture;
}
}
export { LottieLoader };
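/*
 * Usage sketch (not part of the upstream file): the returned CanvasTexture exposes the
 * lottie-web animation object for playback control. The JSON path is an assumption,
 * and the playback calls assume the standard lottie-web AnimationItem API.
 */
const demoLottieLoader = new LottieLoader();
demoLottieLoader.setQuality( 2 );
demoLottieLoader.load( 'textures/lottie/24017-lottie-logo-animation.json', function ( texture ) {

	texture.animation.pause(); // resume later with texture.animation.play()

} );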

435
node_modules/three/examples/jsm/loaders/MD2Loader.js generated vendored Normal file
View File

@@ -0,0 +1,435 @@
import {
AnimationClip,
BufferGeometry,
FileLoader,
Float32BufferAttribute,
Loader,
Vector3
} from 'three';
const _normalData = [
[ - 0.525731, 0.000000, 0.850651 ], [ - 0.442863, 0.238856, 0.864188 ],
[ - 0.295242, 0.000000, 0.955423 ], [ - 0.309017, 0.500000, 0.809017 ],
[ - 0.162460, 0.262866, 0.951056 ], [ 0.000000, 0.000000, 1.000000 ],
[ 0.000000, 0.850651, 0.525731 ], [ - 0.147621, 0.716567, 0.681718 ],
[ 0.147621, 0.716567, 0.681718 ], [ 0.000000, 0.525731, 0.850651 ],
[ 0.309017, 0.500000, 0.809017 ], [ 0.525731, 0.000000, 0.850651 ],
[ 0.295242, 0.000000, 0.955423 ], [ 0.442863, 0.238856, 0.864188 ],
[ 0.162460, 0.262866, 0.951056 ], [ - 0.681718, 0.147621, 0.716567 ],
[ - 0.809017, 0.309017, 0.500000 ], [ - 0.587785, 0.425325, 0.688191 ],
[ - 0.850651, 0.525731, 0.000000 ], [ - 0.864188, 0.442863, 0.238856 ],
[ - 0.716567, 0.681718, 0.147621 ], [ - 0.688191, 0.587785, 0.425325 ],
[ - 0.500000, 0.809017, 0.309017 ], [ - 0.238856, 0.864188, 0.442863 ],
[ - 0.425325, 0.688191, 0.587785 ], [ - 0.716567, 0.681718, - 0.147621 ],
[ - 0.500000, 0.809017, - 0.309017 ], [ - 0.525731, 0.850651, 0.000000 ],
[ 0.000000, 0.850651, - 0.525731 ], [ - 0.238856, 0.864188, - 0.442863 ],
[ 0.000000, 0.955423, - 0.295242 ], [ - 0.262866, 0.951056, - 0.162460 ],
[ 0.000000, 1.000000, 0.000000 ], [ 0.000000, 0.955423, 0.295242 ],
[ - 0.262866, 0.951056, 0.162460 ], [ 0.238856, 0.864188, 0.442863 ],
[ 0.262866, 0.951056, 0.162460 ], [ 0.500000, 0.809017, 0.309017 ],
[ 0.238856, 0.864188, - 0.442863 ], [ 0.262866, 0.951056, - 0.162460 ],
[ 0.500000, 0.809017, - 0.309017 ], [ 0.850651, 0.525731, 0.000000 ],
[ 0.716567, 0.681718, 0.147621 ], [ 0.716567, 0.681718, - 0.147621 ],
[ 0.525731, 0.850651, 0.000000 ], [ 0.425325, 0.688191, 0.587785 ],
[ 0.864188, 0.442863, 0.238856 ], [ 0.688191, 0.587785, 0.425325 ],
[ 0.809017, 0.309017, 0.500000 ], [ 0.681718, 0.147621, 0.716567 ],
[ 0.587785, 0.425325, 0.688191 ], [ 0.955423, 0.295242, 0.000000 ],
[ 1.000000, 0.000000, 0.000000 ], [ 0.951056, 0.162460, 0.262866 ],
[ 0.850651, - 0.525731, 0.000000 ], [ 0.955423, - 0.295242, 0.000000 ],
[ 0.864188, - 0.442863, 0.238856 ], [ 0.951056, - 0.162460, 0.262866 ],
[ 0.809017, - 0.309017, 0.500000 ], [ 0.681718, - 0.147621, 0.716567 ],
[ 0.850651, 0.000000, 0.525731 ], [ 0.864188, 0.442863, - 0.238856 ],
[ 0.809017, 0.309017, - 0.500000 ], [ 0.951056, 0.162460, - 0.262866 ],
[ 0.525731, 0.000000, - 0.850651 ], [ 0.681718, 0.147621, - 0.716567 ],
[ 0.681718, - 0.147621, - 0.716567 ], [ 0.850651, 0.000000, - 0.525731 ],
[ 0.809017, - 0.309017, - 0.500000 ], [ 0.864188, - 0.442863, - 0.238856 ],
[ 0.951056, - 0.162460, - 0.262866 ], [ 0.147621, 0.716567, - 0.681718 ],
[ 0.309017, 0.500000, - 0.809017 ], [ 0.425325, 0.688191, - 0.587785 ],
[ 0.442863, 0.238856, - 0.864188 ], [ 0.587785, 0.425325, - 0.688191 ],
[ 0.688191, 0.587785, - 0.425325 ], [ - 0.147621, 0.716567, - 0.681718 ],
[ - 0.309017, 0.500000, - 0.809017 ], [ 0.000000, 0.525731, - 0.850651 ],
[ - 0.525731, 0.000000, - 0.850651 ], [ - 0.442863, 0.238856, - 0.864188 ],
[ - 0.295242, 0.000000, - 0.955423 ], [ - 0.162460, 0.262866, - 0.951056 ],
[ 0.000000, 0.000000, - 1.000000 ], [ 0.295242, 0.000000, - 0.955423 ],
[ 0.162460, 0.262866, - 0.951056 ], [ - 0.442863, - 0.238856, - 0.864188 ],
[ - 0.309017, - 0.500000, - 0.809017 ], [ - 0.162460, - 0.262866, - 0.951056 ],
[ 0.000000, - 0.850651, - 0.525731 ], [ - 0.147621, - 0.716567, - 0.681718 ],
[ 0.147621, - 0.716567, - 0.681718 ], [ 0.000000, - 0.525731, - 0.850651 ],
[ 0.309017, - 0.500000, - 0.809017 ], [ 0.442863, - 0.238856, - 0.864188 ],
[ 0.162460, - 0.262866, - 0.951056 ], [ 0.238856, - 0.864188, - 0.442863 ],
[ 0.500000, - 0.809017, - 0.309017 ], [ 0.425325, - 0.688191, - 0.587785 ],
[ 0.716567, - 0.681718, - 0.147621 ], [ 0.688191, - 0.587785, - 0.425325 ],
[ 0.587785, - 0.425325, - 0.688191 ], [ 0.000000, - 0.955423, - 0.295242 ],
[ 0.000000, - 1.000000, 0.000000 ], [ 0.262866, - 0.951056, - 0.162460 ],
[ 0.000000, - 0.850651, 0.525731 ], [ 0.000000, - 0.955423, 0.295242 ],
[ 0.238856, - 0.864188, 0.442863 ], [ 0.262866, - 0.951056, 0.162460 ],
[ 0.500000, - 0.809017, 0.309017 ], [ 0.716567, - 0.681718, 0.147621 ],
[ 0.525731, - 0.850651, 0.000000 ], [ - 0.238856, - 0.864188, - 0.442863 ],
[ - 0.500000, - 0.809017, - 0.309017 ], [ - 0.262866, - 0.951056, - 0.162460 ],
[ - 0.850651, - 0.525731, 0.000000 ], [ - 0.716567, - 0.681718, - 0.147621 ],
[ - 0.716567, - 0.681718, 0.147621 ], [ - 0.525731, - 0.850651, 0.000000 ],
[ - 0.500000, - 0.809017, 0.309017 ], [ - 0.238856, - 0.864188, 0.442863 ],
[ - 0.262866, - 0.951056, 0.162460 ], [ - 0.864188, - 0.442863, 0.238856 ],
[ - 0.809017, - 0.309017, 0.500000 ], [ - 0.688191, - 0.587785, 0.425325 ],
[ - 0.681718, - 0.147621, 0.716567 ], [ - 0.442863, - 0.238856, 0.864188 ],
[ - 0.587785, - 0.425325, 0.688191 ], [ - 0.309017, - 0.500000, 0.809017 ],
[ - 0.147621, - 0.716567, 0.681718 ], [ - 0.425325, - 0.688191, 0.587785 ],
[ - 0.162460, - 0.262866, 0.951056 ], [ 0.442863, - 0.238856, 0.864188 ],
[ 0.162460, - 0.262866, 0.951056 ], [ 0.309017, - 0.500000, 0.809017 ],
[ 0.147621, - 0.716567, 0.681718 ], [ 0.000000, - 0.525731, 0.850651 ],
[ 0.425325, - 0.688191, 0.587785 ], [ 0.587785, - 0.425325, 0.688191 ],
[ 0.688191, - 0.587785, 0.425325 ], [ - 0.955423, 0.295242, 0.000000 ],
[ - 0.951056, 0.162460, 0.262866 ], [ - 1.000000, 0.000000, 0.000000 ],
[ - 0.850651, 0.000000, 0.525731 ], [ - 0.955423, - 0.295242, 0.000000 ],
[ - 0.951056, - 0.162460, 0.262866 ], [ - 0.864188, 0.442863, - 0.238856 ],
[ - 0.951056, 0.162460, - 0.262866 ], [ - 0.809017, 0.309017, - 0.500000 ],
[ - 0.864188, - 0.442863, - 0.238856 ], [ - 0.951056, - 0.162460, - 0.262866 ],
[ - 0.809017, - 0.309017, - 0.500000 ], [ - 0.681718, 0.147621, - 0.716567 ],
[ - 0.681718, - 0.147621, - 0.716567 ], [ - 0.850651, 0.000000, - 0.525731 ],
[ - 0.688191, 0.587785, - 0.425325 ], [ - 0.587785, 0.425325, - 0.688191 ],
[ - 0.425325, 0.688191, - 0.587785 ], [ - 0.425325, - 0.688191, - 0.587785 ],
[ - 0.587785, - 0.425325, - 0.688191 ], [ - 0.688191, - 0.587785, - 0.425325 ]
];
/**
* A loader for the MD2 format.
*
* The loader represents the animations of the MD2 asset as an array of animation
* clips and stores them in the `animations` property of the geometry.
*
* ```js
* const loader = new MD2Loader();
* const geometry = await loader.loadAsync( './models/md2/ogro/ogro.md2' );
*
* const animations = geometry.animations;
* ```
*
* @augments Loader
* @three_import import { MD2Loader } from 'three/addons/loaders/MD2Loader.js';
*/
class MD2Loader extends Loader {
/**
* Constructs a new MD2 loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
}
/**
* Starts loading from the given URL and passes the loaded MD2 asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(BufferGeometry)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} [onProgress] - Executed while the loading is in progress.
* @param {onErrorCallback} [onError] - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( buffer ) {
try {
onLoad( scope.parse( buffer ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given MD2 data and returns a geometry.
*
* @param {ArrayBuffer} buffer - The raw MD2 data as an array buffer.
* @return {BufferGeometry} The parsed geometry data.
*/
parse( buffer ) {
const data = new DataView( buffer );
// http://tfc.duke.free.fr/coding/md2-specs-en.html
const header = {};
const headerNames = [
'ident', 'version',
'skinwidth', 'skinheight',
'framesize',
'num_skins', 'num_vertices', 'num_st', 'num_tris', 'num_glcmds', 'num_frames',
'offset_skins', 'offset_st', 'offset_tris', 'offset_frames', 'offset_glcmds', 'offset_end'
];
for ( let i = 0; i < headerNames.length; i ++ ) {
header[ headerNames[ i ] ] = data.getInt32( i * 4, true );
}
if ( header.ident !== 844121161 || header.version !== 8 ) {
console.error( 'Not a valid MD2 file' );
return;
}
if ( header.offset_end !== data.byteLength ) {
console.error( 'Corrupted MD2 file' );
return;
}
//
const geometry = new BufferGeometry();
// uvs
const uvsTemp = [];
let offset = header.offset_st;
for ( let i = 0, l = header.num_st; i < l; i ++ ) {
const u = data.getInt16( offset + 0, true );
const v = data.getInt16( offset + 2, true );
uvsTemp.push( u / header.skinwidth, 1 - ( v / header.skinheight ) );
offset += 4;
}
// triangles
offset = header.offset_tris;
const vertexIndices = [];
const uvIndices = [];
for ( let i = 0, l = header.num_tris; i < l; i ++ ) {
vertexIndices.push(
data.getUint16( offset + 0, true ),
data.getUint16( offset + 2, true ),
data.getUint16( offset + 4, true )
);
uvIndices.push(
data.getUint16( offset + 6, true ),
data.getUint16( offset + 8, true ),
data.getUint16( offset + 10, true )
);
offset += 12;
}
// frames
const translation = new Vector3();
const scale = new Vector3();
const frames = [];
offset = header.offset_frames;
for ( let i = 0, l = header.num_frames; i < l; i ++ ) {
scale.set(
data.getFloat32( offset + 0, true ),
data.getFloat32( offset + 4, true ),
data.getFloat32( offset + 8, true )
);
translation.set(
data.getFloat32( offset + 12, true ),
data.getFloat32( offset + 16, true ),
data.getFloat32( offset + 20, true )
);
offset += 24;
const string = [];
for ( let j = 0; j < 16; j ++ ) {
const character = data.getUint8( offset + j );
if ( character === 0 ) break;
string[ j ] = character;
}
const frame = {
name: String.fromCharCode.apply( null, string ),
vertices: [],
normals: []
};
offset += 16;
for ( let j = 0; j < header.num_vertices; j ++ ) {
let x = data.getUint8( offset ++ );
let y = data.getUint8( offset ++ );
let z = data.getUint8( offset ++ );
const n = _normalData[ data.getUint8( offset ++ ) ];
x = x * scale.x + translation.x;
y = y * scale.y + translation.y;
z = z * scale.z + translation.z;
frame.vertices.push( x, z, y ); // convert to Y-up
frame.normals.push( n[ 0 ], n[ 2 ], n[ 1 ] ); // convert to Y-up
}
frames.push( frame );
}
// static
const positions = [];
const normals = [];
const uvs = [];
const verticesTemp = frames[ 0 ].vertices;
const normalsTemp = frames[ 0 ].normals;
for ( let i = 0, l = vertexIndices.length; i < l; i ++ ) {
const vertexIndex = vertexIndices[ i ];
let stride = vertexIndex * 3;
//
const x = verticesTemp[ stride ];
const y = verticesTemp[ stride + 1 ];
const z = verticesTemp[ stride + 2 ];
positions.push( x, y, z );
//
const nx = normalsTemp[ stride ];
const ny = normalsTemp[ stride + 1 ];
const nz = normalsTemp[ stride + 2 ];
normals.push( nx, ny, nz );
//
const uvIndex = uvIndices[ i ];
stride = uvIndex * 2;
const u = uvsTemp[ stride ];
const v = uvsTemp[ stride + 1 ];
uvs.push( u, v );
}
geometry.setAttribute( 'position', new Float32BufferAttribute( positions, 3 ) );
geometry.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) );
geometry.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) );
// animation
const morphPositions = [];
const morphNormals = [];
for ( let i = 0, l = frames.length; i < l; i ++ ) {
const frame = frames[ i ];
const attributeName = frame.name;
if ( frame.vertices.length > 0 ) {
const positions = [];
for ( let j = 0, jl = vertexIndices.length; j < jl; j ++ ) {
const vertexIndex = vertexIndices[ j ];
const stride = vertexIndex * 3;
const x = frame.vertices[ stride ];
const y = frame.vertices[ stride + 1 ];
const z = frame.vertices[ stride + 2 ];
positions.push( x, y, z );
}
const positionAttribute = new Float32BufferAttribute( positions, 3 );
positionAttribute.name = attributeName;
morphPositions.push( positionAttribute );
}
if ( frame.normals.length > 0 ) {
const normals = [];
for ( let j = 0, jl = vertexIndices.length; j < jl; j ++ ) {
const vertexIndex = vertexIndices[ j ];
const stride = vertexIndex * 3;
const nx = frame.normals[ stride ];
const ny = frame.normals[ stride + 1 ];
const nz = frame.normals[ stride + 2 ];
normals.push( nx, ny, nz );
}
const normalAttribute = new Float32BufferAttribute( normals, 3 );
normalAttribute.name = attributeName;
morphNormals.push( normalAttribute );
}
}
geometry.morphAttributes.position = morphPositions;
geometry.morphAttributes.normal = morphNormals;
geometry.morphTargetsRelative = false;
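// frame names share a prefix per animation (e.g. 'run01', 'run02'), so CreateClipsFromMorphTargetSequences
// groups the morph targets into one clip per prefix, sampled here at 10 frames per second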
geometry.animations = AnimationClip.CreateClipsFromMorphTargetSequences( frames, 10, false );
return geometry;
}
}
export { MD2Loader };

147
node_modules/three/examples/jsm/loaders/MDDLoader.js generated vendored Normal file
View File

@@ -0,0 +1,147 @@
import {
AnimationClip,
BufferAttribute,
FileLoader,
Loader,
NumberKeyframeTrack
} from 'three';
/**
* A loader for the MDD format.
*
* MDD stores a position for every vertex in a model for every frame in an animation.
* Similar to BVH, it can be used to transfer animation data between different 3D applications or engines.
*
* MDD stores its data in binary format (big endian) in the following way:
*
* - number of frames (a single uint32)
* - number of vertices (a single uint32)
* - time values for each frame (sequence of float32)
* - vertex data for each frame (sequence of float32)
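* (a complete file is therefore `8 + 4 * frames + 12 * frames * vertices` bytes)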
*
* ```js
* const loader = new MDDLoader();
* const result = await loader.loadAsync( 'models/mdd/cube.mdd' );
*
* const morphTargets = result.morphTargets;
* const clip = result.clip;
* // clip.optimize(); // optional
*
* const geometry = new THREE.BoxGeometry();
* geometry.morphAttributes.position = morphTargets; // apply morph targets (vertex data must match)
*
* const material = new THREE.MeshBasicMaterial();
*
* const mesh = new THREE.Mesh( geometry, material );
* scene.add( mesh );
*
* const mixer = new THREE.AnimationMixer( mesh );
* mixer.clipAction( clip ).play();
* ```
*
* @augments Loader
* @three_import import { MDDLoader } from 'three/addons/loaders/MDDLoader.js';
*/
class MDDLoader extends Loader {
/**
* Constructs a new MDD loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
}
/**
* Starts loading from the given URL and passes the loaded MDD asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function({clip:AnimationClip, morphTargets:Array<BufferAttribute>})} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setResponseType( 'arraybuffer' );
loader.load( url, function ( data ) {
onLoad( scope.parse( data ) );
}, onProgress, onError );
}
/**
* Parses the given MDD data and returns an object holding the animation clip and the respective
* morph targets.
*
* @param {ArrayBuffer} data - The raw MDD data as an array buffer.
* @return {{clip:AnimationClip, morphTargets:Array<BufferAttribute>}} The result object.
*/
parse( data ) {
const view = new DataView( data );
const totalFrames = view.getUint32( 0 );
const totalPoints = view.getUint32( 4 );
let offset = 8;
// animation clip
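// one morph target per frame: the influence values form a totalFrames x totalFrames matrix in which
// keyframe i gives full weight to morph target i and zero to all others (a stepped walk through the frames)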
const times = new Float32Array( totalFrames );
const values = new Float32Array( totalFrames * totalFrames ).fill( 0 );
for ( let i = 0; i < totalFrames; i ++ ) {
times[ i ] = view.getFloat32( offset ); offset += 4;
values[ ( totalFrames * i ) + i ] = 1;
}
const track = new NumberKeyframeTrack( '.morphTargetInfluences', times, values );
const clip = new AnimationClip( 'default', times[ times.length - 1 ], [ track ] );
// morph targets
const morphTargets = [];
for ( let i = 0; i < totalFrames; i ++ ) {
const morphTarget = new Float32Array( totalPoints * 3 );
for ( let j = 0; j < totalPoints; j ++ ) {
const stride = ( j * 3 );
morphTarget[ stride + 0 ] = view.getFloat32( offset ); offset += 4; // x
morphTarget[ stride + 1 ] = view.getFloat32( offset ); offset += 4; // y
morphTarget[ stride + 2 ] = view.getFloat32( offset ); offset += 4; // z
}
const attribute = new BufferAttribute( morphTarget, 3 );
attribute.name = 'morph_' + i;
morphTargets.push( attribute );
}
return {
morphTargets: morphTargets,
clip: clip
};
}
}
export { MDDLoader };

593
node_modules/three/examples/jsm/loaders/MTLLoader.js generated vendored Normal file
View File

@@ -0,0 +1,593 @@
import {
Color,
ColorManagement,
DefaultLoadingManager,
FileLoader,
FrontSide,
Loader,
LoaderUtils,
MeshPhongMaterial,
RepeatWrapping,
TextureLoader,
Vector2,
SRGBColorSpace
} from 'three';
/**
* A loader for the MTL format.
*
* The Material Template Library format (MTL) or .MTL File Format is a companion file format
* to OBJ that describes surface shading (material) properties of objects within one or more
* OBJ files.
*
* ```js
* const loader = new MTLLoader();
* const materials = await loader.loadAsync( 'models/obj/male02/male02.mtl' );
*
* const objLoader = new OBJLoader();
* objLoader.setMaterials( materials );
* ```
*
* @augments Loader
* @three_import import { MTLLoader } from 'three/addons/loaders/MTLLoader.js';
*/
class MTLLoader extends Loader {
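/**
* Constructs a new MTL loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/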
constructor( manager ) {
super( manager );
}
/**
* Starts loading from the given URL and passes the loaded MTL asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(MaterialCreator)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const path = ( this.path === '' ) ? LoaderUtils.extractUrlBase( url ) : this.path;
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setRequestHeader( this.requestHeader );
loader.setWithCredentials( this.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text, path ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Sets the material options.
*
* @param {MTLLoader~MaterialOptions} value - The material options.
* @return {MTLLoader} A reference to this loader.
*/
setMaterialOptions( value ) {
this.materialOptions = value;
return this;
}
/**
* Parses the given MTL data and returns the resulting material creator.
*
* @param {string} text - The raw MTL data as a string.
* @param {string} path - The URL base path.
* @return {MaterialCreator} The material creator.
*/
parse( text, path ) {
const lines = text.split( '\n' );
let info = {};
const delimiter_pattern = /\s+/;
const materialsInfo = {};
for ( let i = 0; i < lines.length; i ++ ) {
let line = lines[ i ];
line = line.trim();
if ( line.length === 0 || line.charAt( 0 ) === '#' ) {
// Blank line or comment; ignore it
continue;
}
const pos = line.indexOf( ' ' );
let key = ( pos >= 0 ) ? line.substring( 0, pos ) : line;
key = key.toLowerCase();
let value = ( pos >= 0 ) ? line.substring( pos + 1 ) : '';
value = value.trim();
if ( key === 'newmtl' ) {
// New material
info = { name: value };
materialsInfo[ value ] = info;
} else {
if ( key === 'ka' || key === 'kd' || key === 'ks' || key === 'ke' ) {
const ss = value.split( delimiter_pattern, 3 );
info[ key ] = [ parseFloat( ss[ 0 ] ), parseFloat( ss[ 1 ] ), parseFloat( ss[ 2 ] ) ];
} else {
info[ key ] = value;
}
}
}
const materialCreator = new MaterialCreator( this.resourcePath || path, this.materialOptions );
materialCreator.setCrossOrigin( this.crossOrigin );
materialCreator.setManager( this.manager );
materialCreator.setMaterials( materialsInfo );
return materialCreator;
}
}
/**
* Material options of `MTLLoader`.
*
* @typedef {Object} MTLLoader~MaterialOptions
* @property {(FrontSide|BackSide|DoubleSide)} [side=FrontSide] - Which side to apply the material.
* @property {(RepeatWrapping|ClampToEdgeWrapping|MirroredRepeatWrapping)} [wrap=RepeatWrapping] - What type of wrapping to apply for textures.
* @property {boolean} [normalizeRGB=false] - Whether RGB colors should be normalized to `0-1` from `0-255`.
* @property {boolean} [ignoreZeroRGBs=false] - Ignore values of RGBs (Ka,Kd,Ks) that are all 0's.
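* @property {boolean} [invertTrProperty=false] - Treat `Tr` values as opacity instead of transparency (see the `tr` case in `createMaterial_`).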
*/
class MaterialCreator {
constructor( baseUrl = '', options = {} ) {
this.baseUrl = baseUrl;
this.options = options;
this.materialsInfo = {};
this.materials = {};
this.materialsArray = [];
this.nameLookup = {};
this.crossOrigin = 'anonymous';
this.side = ( this.options.side !== undefined ) ? this.options.side : FrontSide;
this.wrap = ( this.options.wrap !== undefined ) ? this.options.wrap : RepeatWrapping;
}
setCrossOrigin( value ) {
this.crossOrigin = value;
return this;
}
setManager( value ) {
this.manager = value;
}
setMaterials( materialsInfo ) {
this.materialsInfo = this.convert( materialsInfo );
this.materials = {};
this.materialsArray = [];
this.nameLookup = {};
}
convert( materialsInfo ) {
if ( ! this.options ) return materialsInfo;
const converted = {};
for ( const mn in materialsInfo ) {
// Convert materials info into normalized form based on options
const mat = materialsInfo[ mn ];
const covmat = {};
converted[ mn ] = covmat;
for ( const prop in mat ) {
let save = true;
let value = mat[ prop ];
const lprop = prop.toLowerCase();
switch ( lprop ) {
case 'kd':
case 'ka':
case 'ks':
// Diffuse color (color under white light) using RGB values
if ( this.options && this.options.normalizeRGB ) {
value = [ value[ 0 ] / 255, value[ 1 ] / 255, value[ 2 ] / 255 ];
}
if ( this.options && this.options.ignoreZeroRGBs ) {
if ( value[ 0 ] === 0 && value[ 1 ] === 0 && value[ 2 ] === 0 ) {
// ignore
save = false;
}
}
break;
default:
break;
}
if ( save ) {
covmat[ lprop ] = value;
}
}
}
return converted;
}
preload() {
for ( const mn in this.materialsInfo ) {
this.create( mn );
}
}
getIndex( materialName ) {
return this.nameLookup[ materialName ];
}
getAsArray() {
let index = 0;
for ( const mn in this.materialsInfo ) {
this.materialsArray[ index ] = this.create( mn );
this.nameLookup[ mn ] = index;
index ++;
}
return this.materialsArray;
}
create( materialName ) {
if ( this.materials[ materialName ] === undefined ) {
this.createMaterial_( materialName );
}
return this.materials[ materialName ];
}
createMaterial_( materialName ) {
// Create material
const scope = this;
const mat = this.materialsInfo[ materialName ];
const params = {
name: materialName,
side: this.side
};
function resolveURL( baseUrl, url ) {
if ( typeof url !== 'string' || url === '' )
return '';
// Absolute URL
if ( /^https?:\/\//i.test( url ) ) return url;
return baseUrl + url;
}
function setMapForType( mapType, value ) {
if ( params[ mapType ] ) return; // Keep the first encountered texture
const texParams = scope.getTextureParams( value, params );
const map = scope.loadTexture( resolveURL( scope.baseUrl, texParams.url ) );
map.repeat.copy( texParams.scale );
map.offset.copy( texParams.offset );
map.wrapS = scope.wrap;
map.wrapT = scope.wrap;
if ( mapType === 'map' || mapType === 'emissiveMap' ) {
map.colorSpace = SRGBColorSpace;
}
params[ mapType ] = map;
}
for ( const prop in mat ) {
const value = mat[ prop ];
let n;
if ( value === '' ) continue;
switch ( prop.toLowerCase() ) {
// Ns is material specular exponent
case 'kd':
// Diffuse color (color under white light) using RGB values
params.color = ColorManagement.colorSpaceToWorking( new Color().fromArray( value ), SRGBColorSpace );
break;
case 'ks':
// Specular color (color when light is reflected from shiny surface) using RGB values
params.specular = ColorManagement.colorSpaceToWorking( new Color().fromArray( value ), SRGBColorSpace );
break;
case 'ke':
// Emissive using RGB values
params.emissive = ColorManagement.colorSpaceToWorking( new Color().fromArray( value ), SRGBColorSpace );
break;
case 'map_kd':
// Diffuse texture map
setMapForType( 'map', value );
break;
case 'map_ks':
// Specular map
setMapForType( 'specularMap', value );
break;
case 'map_ke':
// Emissive map
setMapForType( 'emissiveMap', value );
break;
case 'norm':
setMapForType( 'normalMap', value );
break;
case 'map_bump':
case 'bump':
// Bump texture map
setMapForType( 'bumpMap', value );
break;
case 'disp':
// Displacement texture map
setMapForType( 'displacementMap', value );
break;
case 'map_d':
// Alpha map
setMapForType( 'alphaMap', value );
params.transparent = true;
break;
case 'ns':
// The specular exponent (defines the focus of the specular highlight)
// A high exponent results in a tight, concentrated highlight. Ns values normally range from 0 to 1000.
params.shininess = parseFloat( value );
break;
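// 'd' is dissolve (1.0 = fully opaque); 'Tr' is the inverse transparency value (0.0 = fully opaque)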
case 'd':
n = parseFloat( value );
if ( n < 1 ) {
params.opacity = n;
params.transparent = true;
}
break;
case 'tr':
n = parseFloat( value );
if ( this.options && this.options.invertTrProperty ) n = 1 - n;
if ( n > 0 ) {
params.opacity = 1 - n;
params.transparent = true;
}
break;
default:
break;
}
}
this.materials[ materialName ] = new MeshPhongMaterial( params );
return this.materials[ materialName ];
}
getTextureParams( value, matParams ) {
const texParams = {
scale: new Vector2( 1, 1 ),
offset: new Vector2( 0, 0 )
};
const items = value.split( /\s+/ );
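// e.g. value = '-bm 0.4 -s 2 2 1 -o 0.1 0.1 0 bump.png'; the options are stripped below, leaving only the texture URL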
let pos;
pos = items.indexOf( '-bm' );
if ( pos >= 0 ) {
matParams.bumpScale = parseFloat( items[ pos + 1 ] );
items.splice( pos, 2 );
}
pos = items.indexOf( '-mm' );
if ( pos >= 0 ) {
matParams.displacementBias = parseFloat( items[ pos + 1 ] );
matParams.displacementScale = parseFloat( items[ pos + 2 ] );
items.splice( pos, 3 );
}
pos = items.indexOf( '-s' );
if ( pos >= 0 ) {
texParams.scale.set( parseFloat( items[ pos + 1 ] ), parseFloat( items[ pos + 2 ] ) );
items.splice( pos, 4 ); // we expect 3 parameters here!
}
pos = items.indexOf( '-o' );
if ( pos >= 0 ) {
texParams.offset.set( parseFloat( items[ pos + 1 ] ), parseFloat( items[ pos + 2 ] ) );
items.splice( pos, 4 ); // we expect 3 parameters here!
}
texParams.url = items.join( ' ' ).trim();
return texParams;
}
loadTexture( url, mapping, onLoad, onProgress, onError ) {
const manager = ( this.manager !== undefined ) ? this.manager : DefaultLoadingManager;
let loader = manager.getHandler( url );
if ( loader === null ) {
loader = new TextureLoader( manager );
}
if ( loader.setCrossOrigin ) loader.setCrossOrigin( this.crossOrigin );
const texture = loader.load( url, onLoad, onProgress, onError );
if ( mapping !== undefined ) texture.mapping = mapping;
return texture;
}
}
export { MTLLoader };

File diff suppressed because it is too large Load Diff

718
node_modules/three/examples/jsm/loaders/NRRDLoader.js generated vendored Normal file
View File

@@ -0,0 +1,718 @@
import {
FileLoader,
Loader,
Matrix4,
Vector3
} from 'three';
import * as fflate from '../libs/fflate.module.js';
import { Volume } from '../misc/Volume.js';
/**
* A loader for the NRRD format.
*
* ```js
* const loader = new NRRDLoader();
* const volume = await loader.loadAsync( 'models/nrrd/I.nrrd' );
* ```
*
* @augments Loader
* @three_import import { NRRDLoader } from 'three/addons/loaders/NRRDLoader.js';
*/
class NRRDLoader extends Loader {
/**
* Constructs a new NRRD loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
}
/**
* Starts loading from the given URL and passes the loaded NRRD asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(Volume)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( data ) {
try {
onLoad( scope.parse( data ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Toggles the segmentation mode.
*
* @param {boolean} segmentation - Whether to use segmentation mode or not.
*/
setSegmentation( segmentation ) {
this.segmentation = segmentation;
}
/**
* Parses the given NRRD data and returns the resulting volume data.
*
* @param {ArrayBuffer} data - The raw NRRD data as an array buffer.
* @return {Volume} The parsed volume.
*/
parse( data ) {
// this parser is largely inspired by the XTK NRRD parser: https://github.com/xtk/X
let _data = data;
let _dataPointer = 0;
const _nativeLittleEndian = new Int8Array( new Int16Array( [ 1 ] ).buffer )[ 0 ] > 0;
const _littleEndian = true;
const headerObject = {};
function scan( type, chunks ) {
let _chunkSize = 1;
let _array_type = Uint8Array;
switch ( type ) {
// 1 byte data types
case 'uchar':
break;
case 'schar':
_array_type = Int8Array;
break;
// 2 byte data types
case 'ushort':
_array_type = Uint16Array;
_chunkSize = 2;
break;
case 'sshort':
_array_type = Int16Array;
_chunkSize = 2;
break;
// 4 byte data types
case 'uint':
_array_type = Uint32Array;
_chunkSize = 4;
break;
case 'sint':
_array_type = Int32Array;
_chunkSize = 4;
break;
case 'float':
_array_type = Float32Array;
_chunkSize = 4;
break;
case 'complex':
_array_type = Float64Array;
_chunkSize = 8;
break;
case 'double':
_array_type = Float64Array;
_chunkSize = 8;
break;
}
// increase the data pointer in-place
let _bytes = new _array_type( _data.slice( _dataPointer,
_dataPointer += chunks * _chunkSize ) );
// if required, flip the endianness of the bytes
if ( _nativeLittleEndian != _littleEndian ) {
// we need to flip here since the format doesn't match the native endianness
_bytes = flipEndianness( _bytes, _chunkSize );
}
// return the byte array
return _bytes;
}
//Flips typed array endianness in-place. Based on https://github.com/kig/DataStream.js/blob/master/DataStream.js.
function flipEndianness( array, chunkSize ) {
const u8 = new Uint8Array( array.buffer, array.byteOffset, array.byteLength );
for ( let i = 0; i < array.byteLength; i += chunkSize ) {
for ( let j = i + chunkSize - 1, k = i; j > k; j --, k ++ ) {
const tmp = u8[ k ];
u8[ k ] = u8[ j ];
u8[ j ] = tmp;
}
}
return array;
}
//parse the header
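// a typical plain-text NRRD header looks like:
//   NRRD0004
//   type: float
//   dimension: 3
//   sizes: 64 64 42
//   encoding: gzip
//   space directions: (1,0,0) (0,1,0) (0,0,1)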
function parseHeader( header ) {
let data, field, fn, i, l, m, _i, _len;
const lines = header.split( /\r?\n/ );
for ( _i = 0, _len = lines.length; _i < _len; _i ++ ) {
l = lines[ _i ];
if ( l.match( /NRRD\d+/ ) ) {
headerObject.isNrrd = true;
} else if ( ! l.match( /^#/ ) && ( m = l.match( /(.*):(.*)/ ) ) ) {
field = m[ 1 ].trim();
data = m[ 2 ].trim();
fn = _fieldFunctions[ field ];
if ( fn ) {
fn.call( headerObject, data );
} else {
headerObject[ field ] = data;
}
}
}
if ( ! headerObject.isNrrd ) {
throw new Error( 'Not an NRRD file' );
}
if ( headerObject.encoding === 'bz2' || headerObject.encoding === 'bzip2' ) {
throw new Error( 'Bzip is not supported' );
}
if ( ! headerObject.vectors ) {
//if no space direction is set, let's use the identity
headerObject.vectors = [ ];
headerObject.vectors.push( [ 1, 0, 0 ] );
headerObject.vectors.push( [ 0, 1, 0 ] );
headerObject.vectors.push( [ 0, 0, 1 ] );
//apply spacing if defined
if ( headerObject.spacings ) {
for ( i = 0; i <= 2; i ++ ) {
if ( ! isNaN( headerObject.spacings[ i ] ) ) {
for ( let j = 0; j <= 2; j ++ ) {
headerObject.vectors[ i ][ j ] *= headerObject.spacings[ i ];
}
}
}
}
}
}
//parse the data when registered as one of this type : 'text', 'ascii', 'txt'
function parseDataAsText( data, start, end ) {
let number = '';
start = start || 0;
end = end || data.length;
let value;
//length of the result is the product of the sizes
const lengthOfTheResult = headerObject.sizes.reduce( function ( previous, current ) {
return previous * current;
}, 1 );
let base = 10;
if ( headerObject.encoding === 'hex' ) {
base = 16;
}
const result = new headerObject.__array( lengthOfTheResult );
let resultIndex = 0;
let parsingFunction = parseInt;
if ( headerObject.__array === Float32Array || headerObject.__array === Float64Array ) {
parsingFunction = parseFloat;
}
for ( let i = start; i < end; i ++ ) {
value = data[ i ];
//if value is not a space
if ( ( value < 9 || value > 13 ) && value !== 32 ) {
number += String.fromCharCode( value );
} else {
if ( number !== '' ) {
result[ resultIndex ] = parsingFunction( number, base );
resultIndex ++;
}
number = '';
}
}
if ( number !== '' ) {
result[ resultIndex ] = parsingFunction( number, base );
resultIndex ++;
}
return result;
}
const _bytes = scan( 'uchar', data.byteLength );
const _length = _bytes.length;
let _header = null;
let _data_start = 0;
let i;
for ( i = 1; i < _length; i ++ ) {
if ( _bytes[ i - 1 ] == 10 && _bytes[ i ] == 10 ) {
// we found two line breaks in a row
// now we know what the header is
_header = this._parseChars( _bytes, 0, i - 2 );
// this is where the data starts
_data_start = i + 1;
break;
}
}
// parse the header
parseHeader( _header );
_data = _bytes.subarray( _data_start ); // the data without header
if ( headerObject.encoding.substring( 0, 2 ) === 'gz' ) {
// we need to decompress the datastream
// here we start the unzipping and get a typed Uint8Array back
_data = fflate.gunzipSync( new Uint8Array( _data ) );
} else if ( headerObject.encoding === 'ascii' || headerObject.encoding === 'text' || headerObject.encoding === 'txt' || headerObject.encoding === 'hex' ) {
_data = parseDataAsText( _data );
} else if ( headerObject.encoding === 'raw' ) {
//we need to copy the data into a fresh array; otherwise `_data.buffer` below would still be the original buffer that includes the header
const _copy = new Uint8Array( _data.length );
for ( let i = 0; i < _data.length; i ++ ) {
_copy[ i ] = _data[ i ];
}
_data = _copy;
}
// .. let's use the underlying array buffer
_data = _data.buffer;
const volume = new Volume();
volume.header = headerObject;
volume.segmentation = this.segmentation;
//
// parse the (unzipped) data to a datastream of the correct type
//
volume.data = new headerObject.__array( _data );
// get the min and max intensities
const min_max = volume.computeMinMax();
const min = min_max[ 0 ];
const max = min_max[ 1 ];
// attach the scalar range to the volume
volume.windowLow = min;
volume.windowHigh = max;
// get the image dimensions
volume.dimensions = [ headerObject.sizes[ 0 ], headerObject.sizes[ 1 ], headerObject.sizes[ 2 ] ];
volume.xLength = volume.dimensions[ 0 ];
volume.yLength = volume.dimensions[ 1 ];
volume.zLength = volume.dimensions[ 2 ];
// Identify axis order in the space-directions matrix from the header if possible.
if ( headerObject.vectors ) {
const xIndex = headerObject.vectors.findIndex( vector => vector[ 0 ] !== 0 );
const yIndex = headerObject.vectors.findIndex( vector => vector[ 1 ] !== 0 );
const zIndex = headerObject.vectors.findIndex( vector => vector[ 2 ] !== 0 );
const axisOrder = [];
if ( xIndex !== yIndex && xIndex !== zIndex && yIndex !== zIndex ) {
axisOrder[ xIndex ] = 'x';
axisOrder[ yIndex ] = 'y';
axisOrder[ zIndex ] = 'z';
} else {
axisOrder[ 0 ] = 'x';
axisOrder[ 1 ] = 'y';
axisOrder[ 2 ] = 'z';
}
volume.axisOrder = axisOrder;
} else {
volume.axisOrder = [ 'x', 'y', 'z' ];
}
// spacing
const spacingX = new Vector3().fromArray( headerObject.vectors[ 0 ] ).length();
const spacingY = new Vector3().fromArray( headerObject.vectors[ 1 ] ).length();
const spacingZ = new Vector3().fromArray( headerObject.vectors[ 2 ] ).length();
volume.spacing = [ spacingX, spacingY, spacingZ ];
// Create IJKtoRAS matrix
volume.matrix = new Matrix4();
const transitionMatrix = new Matrix4();
if ( headerObject.space === 'left-posterior-superior' ) {
transitionMatrix.set(
- 1, 0, 0, 0,
0, - 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
);
} else if ( headerObject.space === 'left-anterior-superior' ) {
transitionMatrix.set(
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, - 1, 0,
0, 0, 0, 1
);
}
if ( ! headerObject.vectors ) {
volume.matrix.set(
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1 );
} else {
const v = headerObject.vectors;
const ijk_to_transition = new Matrix4().set(
v[ 0 ][ 0 ], v[ 1 ][ 0 ], v[ 2 ][ 0 ], 0,
v[ 0 ][ 1 ], v[ 1 ][ 1 ], v[ 2 ][ 1 ], 0,
v[ 0 ][ 2 ], v[ 1 ][ 2 ], v[ 2 ][ 2 ], 0,
0, 0, 0, 1
);
const transition_to_ras = new Matrix4().multiplyMatrices( ijk_to_transition, transitionMatrix );
volume.matrix = transition_to_ras;
}
volume.inverseMatrix = new Matrix4();
volume.inverseMatrix.copy( volume.matrix ).invert();
volume.RASDimensions = [
Math.floor( volume.xLength * spacingX ),
Math.floor( volume.yLength * spacingY ),
Math.floor( volume.zLength * spacingZ )
];
// .. and set the default threshold
// only if the threshold was not already set
if ( volume.lowerThreshold === - Infinity ) {
volume.lowerThreshold = min;
}
if ( volume.upperThreshold === Infinity ) {
volume.upperThreshold = max;
}
return volume;
}
_parseChars( array, start, end ) {
// without borders, use the whole array
if ( start === undefined ) {
start = 0;
}
if ( end === undefined ) {
end = array.length;
}
let output = '';
// create and append the chars
let i = 0;
for ( i = start; i < end; ++ i ) {
output += String.fromCharCode( array[ i ] );
}
return output;
}
}
const _fieldFunctions = {
type: function ( data ) {
switch ( data ) {
case 'uchar':
case 'unsigned char':
case 'uint8':
case 'uint8_t':
this.__array = Uint8Array;
break;
case 'signed char':
case 'int8':
case 'int8_t':
this.__array = Int8Array;
break;
case 'short':
case 'short int':
case 'signed short':
case 'signed short int':
case 'int16':
case 'int16_t':
this.__array = Int16Array;
break;
case 'ushort':
case 'unsigned short':
case 'unsigned short int':
case 'uint16':
case 'uint16_t':
this.__array = Uint16Array;
break;
case 'int':
case 'signed int':
case 'int32':
case 'int32_t':
this.__array = Int32Array;
break;
case 'uint':
case 'unsigned int':
case 'uint32':
case 'uint32_t':
this.__array = Uint32Array;
break;
case 'float':
this.__array = Float32Array;
break;
case 'double':
this.__array = Float64Array;
break;
default:
throw new Error( 'Unsupported NRRD data type: ' + data );
}
return this.type = data;
},
endian: function ( data ) {
return this.endian = data;
},
encoding: function ( data ) {
return this.encoding = data;
},
dimension: function ( data ) {
return this.dim = parseInt( data, 10 );
},
sizes: function ( data ) {
let i;
return this.sizes = ( function () {
const _ref = data.split( /\s+/ );
const _results = [];
for ( let _i = 0, _len = _ref.length; _i < _len; _i ++ ) {
i = _ref[ _i ];
_results.push( parseInt( i, 10 ) );
}
return _results;
} )();
},
space: function ( data ) {
return this.space = data;
},
'space origin': function ( data ) {
return this.space_origin = data.split( '(' )[ 1 ].split( ')' )[ 0 ].split( ',' );
},
'space directions': function ( data ) {
let f, v;
const parts = data.match( /\(.*?\)/g );
return this.vectors = ( function () {
const _results = [];
for ( let _i = 0, _len = parts.length; _i < _len; _i ++ ) {
v = parts[ _i ];
_results.push( ( function () {
const _ref = v.slice( 1, - 1 ).split( /,/ );
const _results2 = [];
for ( let _j = 0, _len2 = _ref.length; _j < _len2; _j ++ ) {
f = _ref[ _j ];
_results2.push( parseFloat( f ) );
}
return _results2;
} )() );
}
return _results;
} )();
},
spacings: function ( data ) {
let f;
const parts = data.split( /\s+/ );
return this.spacings = ( function () {
const _results = [];
for ( let _i = 0, _len = parts.length; _i < _len; _i ++ ) {
f = parts[ _i ];
_results.push( parseFloat( f ) );
}
return _results;
} )();
}
};
export { NRRDLoader };

955
node_modules/three/examples/jsm/loaders/OBJLoader.js generated vendored Normal file
View File

@@ -0,0 +1,955 @@
import {
BufferGeometry,
FileLoader,
Float32BufferAttribute,
Group,
LineBasicMaterial,
LineSegments,
Loader,
Material,
Mesh,
MeshPhongMaterial,
Points,
PointsMaterial,
Vector3,
Color,
SRGBColorSpace
} from 'three';
// o object_name | g group_name
const _object_pattern = /^[og]\s*(.+)?/;
// mtllib file_reference
const _material_library_pattern = /^mtllib /;
// usemtl material_name
const _material_use_pattern = /^usemtl /;
// usemap map_name
const _map_use_pattern = /^usemap /;
const _face_vertex_data_separator_pattern = /\s+/;
const _vA = new Vector3();
const _vB = new Vector3();
const _vC = new Vector3();
const _ab = new Vector3();
const _cb = new Vector3();
const _color = new Color();
function ParserState() {
const state = {
objects: [],
object: {},
vertices: [],
normals: [],
colors: [],
uvs: [],
materials: {},
materialLibraries: [],
startObject: function ( name, fromDeclaration ) {
// If the current object (the initial one from reset) is not from a g/o declaration in the parsed
// file, reuse it for the first parsed g/o to keep things in sync.
if ( this.object && this.object.fromDeclaration === false ) {
this.object.name = name;
this.object.fromDeclaration = ( fromDeclaration !== false );
return;
}
const previousMaterial = ( this.object && typeof this.object.currentMaterial === 'function' ? this.object.currentMaterial() : undefined );
if ( this.object && typeof this.object._finalize === 'function' ) {
this.object._finalize( true );
}
this.object = {
name: name || '',
fromDeclaration: ( fromDeclaration !== false ),
geometry: {
vertices: [],
normals: [],
colors: [],
uvs: [],
hasUVIndices: false
},
materials: [],
smooth: true,
startMaterial: function ( name, libraries ) {
const previous = this._finalize( false );
// New usemtl declaration overwrites an inherited material, except if faces were declared
// after the material, then it must be preserved for proper MultiMaterial continuation.
if ( previous && ( previous.inherited || previous.groupCount <= 0 ) ) {
this.materials.splice( previous.index, 1 );
}
const material = {
index: this.materials.length,
name: name || '',
mtllib: ( Array.isArray( libraries ) && libraries.length > 0 ? libraries[ libraries.length - 1 ] : '' ),
smooth: ( previous !== undefined ? previous.smooth : this.smooth ),
groupStart: ( previous !== undefined ? previous.groupEnd : 0 ),
groupEnd: - 1,
groupCount: - 1,
inherited: false,
clone: function ( index ) {
const cloned = {
index: ( typeof index === 'number' ? index : this.index ),
name: this.name,
mtllib: this.mtllib,
smooth: this.smooth,
groupStart: 0,
groupEnd: - 1,
groupCount: - 1,
inherited: false
};
cloned.clone = this.clone.bind( cloned );
return cloned;
}
};
this.materials.push( material );
return material;
},
currentMaterial: function () {
if ( this.materials.length > 0 ) {
return this.materials[ this.materials.length - 1 ];
}
return undefined;
},
_finalize: function ( end ) {
const lastMultiMaterial = this.currentMaterial();
if ( lastMultiMaterial && lastMultiMaterial.groupEnd === - 1 ) {
lastMultiMaterial.groupEnd = this.geometry.vertices.length / 3;
lastMultiMaterial.groupCount = lastMultiMaterial.groupEnd - lastMultiMaterial.groupStart;
lastMultiMaterial.inherited = false;
}
// Ignore the object's tail materials if no face declarations followed them before a new o/g started.
if ( end && this.materials.length > 1 ) {
for ( let mi = this.materials.length - 1; mi >= 0; mi -- ) {
if ( this.materials[ mi ].groupCount <= 0 ) {
this.materials.splice( mi, 1 );
}
}
}
// Guarantee at least one empty material, this makes the creation later more straight forward.
if ( end && this.materials.length === 0 ) {
this.materials.push( {
name: '',
smooth: this.smooth
} );
}
return lastMultiMaterial;
}
};
// Inherit the previous object's material.
// Spec tells us that a declared material must be set to all objects until a new material is declared.
// If a usemtl declaration is encountered while this new object is being parsed, it will
// overwrite the inherited material, unless faces were already declared with the inherited
// material; in that case it is preserved for proper MultiMaterial continuation.
if ( previousMaterial && previousMaterial.name && typeof previousMaterial.clone === 'function' ) {
const declared = previousMaterial.clone( 0 );
declared.inherited = true;
this.object.materials.push( declared );
}
this.objects.push( this.object );
},
finalize: function () {
if ( this.object && typeof this.object._finalize === 'function' ) {
this.object._finalize( true );
}
},
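// OBJ indices are 1-based; negative indices are relative to the end of the respective list parsed so far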
parseVertexIndex: function ( value, len ) {
const index = parseInt( value, 10 );
return ( index >= 0 ? index - 1 : index + len / 3 ) * 3;
},
parseNormalIndex: function ( value, len ) {
const index = parseInt( value, 10 );
return ( index >= 0 ? index - 1 : index + len / 3 ) * 3;
},
parseUVIndex: function ( value, len ) {
const index = parseInt( value, 10 );
return ( index >= 0 ? index - 1 : index + len / 2 ) * 2;
},
addVertex: function ( a, b, c ) {
const src = this.vertices;
const dst = this.object.geometry.vertices;
dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] );
dst.push( src[ b + 0 ], src[ b + 1 ], src[ b + 2 ] );
dst.push( src[ c + 0 ], src[ c + 1 ], src[ c + 2 ] );
},
addVertexPoint: function ( a ) {
const src = this.vertices;
const dst = this.object.geometry.vertices;
dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] );
},
addVertexLine: function ( a ) {
const src = this.vertices;
const dst = this.object.geometry.vertices;
dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] );
},
addNormal: function ( a, b, c ) {
const src = this.normals;
const dst = this.object.geometry.normals;
dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] );
dst.push( src[ b + 0 ], src[ b + 1 ], src[ b + 2 ] );
dst.push( src[ c + 0 ], src[ c + 1 ], src[ c + 2 ] );
},
addFaceNormal: function ( a, b, c ) {
const src = this.vertices;
const dst = this.object.geometry.normals;
_vA.fromArray( src, a );
_vB.fromArray( src, b );
_vC.fromArray( src, c );
_cb.subVectors( _vC, _vB );
_ab.subVectors( _vA, _vB );
_cb.cross( _ab );
_cb.normalize();
dst.push( _cb.x, _cb.y, _cb.z );
dst.push( _cb.x, _cb.y, _cb.z );
dst.push( _cb.x, _cb.y, _cb.z );
},
addColor: function ( a, b, c ) {
const src = this.colors;
const dst = this.object.geometry.colors;
if ( src[ a ] !== undefined ) dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] );
if ( src[ b ] !== undefined ) dst.push( src[ b + 0 ], src[ b + 1 ], src[ b + 2 ] );
if ( src[ c ] !== undefined ) dst.push( src[ c + 0 ], src[ c + 1 ], src[ c + 2 ] );
},
addUV: function ( a, b, c ) {
const src = this.uvs;
const dst = this.object.geometry.uvs;
dst.push( src[ a + 0 ], src[ a + 1 ] );
dst.push( src[ b + 0 ], src[ b + 1 ] );
dst.push( src[ c + 0 ], src[ c + 1 ] );
},
addDefaultUV: function () {
const dst = this.object.geometry.uvs;
dst.push( 0, 0 );
dst.push( 0, 0 );
dst.push( 0, 0 );
},
addUVLine: function ( a ) {
const src = this.uvs;
const dst = this.object.geometry.uvs;
dst.push( src[ a + 0 ], src[ a + 1 ] );
},
addFace: function ( a, b, c, ua, ub, uc, na, nb, nc ) {
const vLen = this.vertices.length;
let ia = this.parseVertexIndex( a, vLen );
let ib = this.parseVertexIndex( b, vLen );
let ic = this.parseVertexIndex( c, vLen );
this.addVertex( ia, ib, ic );
this.addColor( ia, ib, ic );
// normals
if ( na !== undefined && na !== '' ) {
const nLen = this.normals.length;
ia = this.parseNormalIndex( na, nLen );
ib = this.parseNormalIndex( nb, nLen );
ic = this.parseNormalIndex( nc, nLen );
this.addNormal( ia, ib, ic );
} else {
this.addFaceNormal( ia, ib, ic );
}
// uvs
if ( ua !== undefined && ua !== '' ) {
const uvLen = this.uvs.length;
ia = this.parseUVIndex( ua, uvLen );
ib = this.parseUVIndex( ub, uvLen );
ic = this.parseUVIndex( uc, uvLen );
this.addUV( ia, ib, ic );
this.object.geometry.hasUVIndices = true;
} else {
// add placeholder values (for inconsistent face definitions)
this.addDefaultUV();
}
},
addPointGeometry: function ( vertices ) {
this.object.geometry.type = 'Points';
const vLen = this.vertices.length;
for ( let vi = 0, l = vertices.length; vi < l; vi ++ ) {
const index = this.parseVertexIndex( vertices[ vi ], vLen );
this.addVertexPoint( index );
this.addColor( index );
}
},
addLineGeometry: function ( vertices, uvs ) {
this.object.geometry.type = 'Line';
const vLen = this.vertices.length;
const uvLen = this.uvs.length;
for ( let vi = 0, l = vertices.length; vi < l; vi ++ ) {
this.addVertexLine( this.parseVertexIndex( vertices[ vi ], vLen ) );
}
for ( let uvi = 0, l = uvs.length; uvi < l; uvi ++ ) {
this.addUVLine( this.parseUVIndex( uvs[ uvi ], uvLen ) );
}
}
};
state.startObject( '', false );
return state;
}
/**
* A loader for the OBJ format.
*
* The [OBJ format]{@link https://en.wikipedia.org/wiki/Wavefront_.obj_file} is a simple data-format that
* represents 3D geometry in a human readable format as the position of each vertex, the UV position of
* each texture coordinate vertex, vertex normals, and the faces that make each polygon defined as a list
* of vertices, and texture vertices.
*
* ```js
* const loader = new OBJLoader();
* const object = await loader.loadAsync( 'models/monster.obj' );
* scene.add( object );
* ```
*
* @augments Loader
* @three_import import { OBJLoader } from 'three/addons/loaders/OBJLoader.js';
*/
class OBJLoader extends Loader {
/**
* Constructs a new OBJ loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
/**
* A reference to a material creator.
*
* @type {?MaterialCreator}
* @default null
*/
this.materials = null;
}
/**
* Starts loading from the given URL and passes the loaded OBJ asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(Group)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setRequestHeader( this.requestHeader );
loader.setWithCredentials( this.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Sets the material creator for this OBJ. This object is loaded via {@link MTLLoader}.
*
* @param {MaterialCreator} materials - An object that creates the materials for this OBJ.
* @return {OBJLoader} A reference to this loader.
*/
setMaterials( materials ) {
this.materials = materials;
return this;
}
/**
* Parses the given OBJ data and returns the resulting group.
*
* @param {string} text - The raw OBJ data as a string.
* @return {Group} The parsed OBJ.
*/
parse( text ) {
const state = new ParserState();
if ( text.indexOf( '\r\n' ) !== - 1 ) {
// This is faster than String.split with regex that splits on both
text = text.replace( /\r\n/g, '\n' );
}
if ( text.indexOf( '\\\n' ) !== - 1 ) {
// join lines separated by a line continuation character (\)
text = text.replace( /\\\n/g, '' );
}
const lines = text.split( '\n' );
let result = [];
for ( let i = 0, l = lines.length; i < l; i ++ ) {
const line = lines[ i ].trimStart();
if ( line.length === 0 ) continue;
const lineFirstChar = line.charAt( 0 );
// @todo invoke passed in handler if any
if ( lineFirstChar === '#' ) continue; // skip comments
if ( lineFirstChar === 'v' ) {
const data = line.split( _face_vertex_data_separator_pattern );
switch ( data[ 0 ] ) {
case 'v':
state.vertices.push(
parseFloat( data[ 1 ] ),
parseFloat( data[ 2 ] ),
parseFloat( data[ 3 ] )
);
if ( data.length >= 7 ) {
_color.setRGB(
parseFloat( data[ 4 ] ),
parseFloat( data[ 5 ] ),
parseFloat( data[ 6 ] ),
SRGBColorSpace
);
state.colors.push( _color.r, _color.g, _color.b );
} else {
// if no colors are defined, add placeholders so color and vertex indices match
state.colors.push( undefined, undefined, undefined );
}
break;
case 'vn':
state.normals.push(
parseFloat( data[ 1 ] ),
parseFloat( data[ 2 ] ),
parseFloat( data[ 3 ] )
);
break;
case 'vt':
state.uvs.push(
parseFloat( data[ 1 ] ),
parseFloat( data[ 2 ] )
);
break;
}
} else if ( lineFirstChar === 'f' ) {
const lineData = line.slice( 1 ).trim();
const vertexData = lineData.split( _face_vertex_data_separator_pattern );
const faceVertices = [];
// Parse the face vertex data into an easy to work with format
for ( let j = 0, jl = vertexData.length; j < jl; j ++ ) {
const vertex = vertexData[ j ];
if ( vertex.length > 0 ) {
const vertexParts = vertex.split( '/' );
faceVertices.push( vertexParts );
}
}
// Triangulate the n-gon as a fan: connect the first vertex with each subsequent pair of vertices
const v1 = faceVertices[ 0 ];
for ( let j = 1, jl = faceVertices.length - 1; j < jl; j ++ ) {
const v2 = faceVertices[ j ];
const v3 = faceVertices[ j + 1 ];
state.addFace(
v1[ 0 ], v2[ 0 ], v3[ 0 ],
v1[ 1 ], v2[ 1 ], v3[ 1 ],
v1[ 2 ], v2[ 2 ], v3[ 2 ]
);
}
} else if ( lineFirstChar === 'l' ) {
const lineParts = line.substring( 1 ).trim().split( ' ' );
let lineVertices = [];
const lineUVs = [];
if ( line.indexOf( '/' ) === - 1 ) {
lineVertices = lineParts;
} else {
for ( let li = 0, llen = lineParts.length; li < llen; li ++ ) {
const parts = lineParts[ li ].split( '/' );
if ( parts[ 0 ] !== '' ) lineVertices.push( parts[ 0 ] );
if ( parts[ 1 ] !== '' ) lineUVs.push( parts[ 1 ] );
}
}
state.addLineGeometry( lineVertices, lineUVs );
} else if ( lineFirstChar === 'p' ) {
const lineData = line.slice( 1 ).trim();
const pointData = lineData.split( ' ' );
state.addPointGeometry( pointData );
} else if ( ( result = _object_pattern.exec( line ) ) !== null ) {
// o object_name
// or
// g group_name
// WORKAROUND: https://bugs.chromium.org/p/v8/issues/detail?id=2869
// let name = result[ 0 ].slice( 1 ).trim();
const name = ( ' ' + result[ 0 ].slice( 1 ).trim() ).slice( 1 );
state.startObject( name );
} else if ( _material_use_pattern.test( line ) ) {
// material
state.object.startMaterial( line.substring( 7 ).trim(), state.materialLibraries );
} else if ( _material_library_pattern.test( line ) ) {
// mtl file
state.materialLibraries.push( line.substring( 7 ).trim() );
} else if ( _map_use_pattern.test( line ) ) {
// the line is parsed but ignored since the loader assumes textures are defined in MTL files
// (according to https://www.okino.com/conv/imp_wave.htm, 'usemap' is the old-style Wavefront texture reference method)
console.warn( 'THREE.OBJLoader: Rendering identifier "usemap" not supported. Textures must be defined in MTL files.' );
} else if ( lineFirstChar === 's' ) {
result = line.split( ' ' );
// smooth shading
// @todo Handle files that have varying smooth values for a set of faces inside one geometry,
// but does not define a usemtl for each face set.
// This should be detected and a dummy material created (later MultiMaterial and geometry groups).
// This requires some care to not create extra material on each smooth value for "normal" obj files.
// where explicit usemtl defines geometry groups.
// Example asset: examples/models/obj/cerberus/Cerberus.obj
/*
* http://paulbourke.net/dataformats/obj/
*
* From chapter "Grouping" Syntax explanation "s group_number":
* "group_number is the smoothing group number. To turn off smoothing groups, use a value of 0 or off.
* Polygonal elements use group numbers to put elements in different smoothing groups. For free-form
* surfaces, smoothing groups are either turned on or off; there is no difference between values greater
* than 0."
*/
if ( result.length > 1 ) {
const value = result[ 1 ].trim().toLowerCase();
state.object.smooth = ( value !== '0' && value !== 'off' );
} else {
// ZBrush can produce "s" lines #11707
state.object.smooth = true;
}
const material = state.object.currentMaterial();
if ( material ) material.smooth = state.object.smooth;
} else {
// Handle null terminated files without exception
if ( line === '\0' ) continue;
console.warn( 'THREE.OBJLoader: Unexpected line: "' + line + '"' );
}
}
state.finalize();
const container = new Group();
container.materialLibraries = [].concat( state.materialLibraries );
const hasPrimitives = ! ( state.objects.length === 1 && state.objects[ 0 ].geometry.vertices.length === 0 );
if ( hasPrimitives === true ) {
for ( let i = 0, l = state.objects.length; i < l; i ++ ) {
const object = state.objects[ i ];
const geometry = object.geometry;
const materials = object.materials;
const isLine = ( geometry.type === 'Line' );
const isPoints = ( geometry.type === 'Points' );
let hasVertexColors = false;
// Skip o/g line declarations that did not follow with any faces
if ( geometry.vertices.length === 0 ) continue;
const buffergeometry = new BufferGeometry();
buffergeometry.setAttribute( 'position', new Float32BufferAttribute( geometry.vertices, 3 ) );
if ( geometry.normals.length > 0 ) {
buffergeometry.setAttribute( 'normal', new Float32BufferAttribute( geometry.normals, 3 ) );
}
if ( geometry.colors.length > 0 ) {
hasVertexColors = true;
buffergeometry.setAttribute( 'color', new Float32BufferAttribute( geometry.colors, 3 ) );
}
if ( geometry.hasUVIndices === true ) {
buffergeometry.setAttribute( 'uv', new Float32BufferAttribute( geometry.uvs, 2 ) );
}
// Create materials
const createdMaterials = [];
for ( let mi = 0, miLen = materials.length; mi < miLen; mi ++ ) {
const sourceMaterial = materials[ mi ];
const materialHash = sourceMaterial.name + '_' + sourceMaterial.smooth + '_' + hasVertexColors;
let material = state.materials[ materialHash ];
if ( this.materials !== null ) {
material = this.materials.create( sourceMaterial.name );
// mtl etc. loaders probably can't create line materials correctly, copy properties to a line material.
if ( isLine && material && ! ( material instanceof LineBasicMaterial ) ) {
const materialLine = new LineBasicMaterial();
Material.prototype.copy.call( materialLine, material );
materialLine.color.copy( material.color );
material = materialLine;
} else if ( isPoints && material && ! ( material instanceof PointsMaterial ) ) {
const materialPoints = new PointsMaterial( { size: 10, sizeAttenuation: false } );
Material.prototype.copy.call( materialPoints, material );
materialPoints.color.copy( material.color );
materialPoints.map = material.map;
material = materialPoints;
}
}
if ( material === undefined ) {
if ( isLine ) {
material = new LineBasicMaterial();
} else if ( isPoints ) {
material = new PointsMaterial( { size: 1, sizeAttenuation: false } );
} else {
material = new MeshPhongMaterial();
}
material.name = sourceMaterial.name;
material.flatShading = sourceMaterial.smooth ? false : true;
material.vertexColors = hasVertexColors;
state.materials[ materialHash ] = material;
}
createdMaterials.push( material );
}
// Create mesh
let mesh;
if ( createdMaterials.length > 1 ) {
for ( let mi = 0, miLen = materials.length; mi < miLen; mi ++ ) {
const sourceMaterial = materials[ mi ];
buffergeometry.addGroup( sourceMaterial.groupStart, sourceMaterial.groupCount, mi );
}
if ( isLine ) {
mesh = new LineSegments( buffergeometry, createdMaterials );
} else if ( isPoints ) {
mesh = new Points( buffergeometry, createdMaterials );
} else {
mesh = new Mesh( buffergeometry, createdMaterials );
}
} else {
if ( isLine ) {
mesh = new LineSegments( buffergeometry, createdMaterials[ 0 ] );
} else if ( isPoints ) {
mesh = new Points( buffergeometry, createdMaterials[ 0 ] );
} else {
mesh = new Mesh( buffergeometry, createdMaterials[ 0 ] );
}
}
mesh.name = object.name;
container.add( mesh );
}
} else {
// if there is only the default parser state object with no geometry data, interpret data as point cloud
if ( state.vertices.length > 0 ) {
const material = new PointsMaterial( { size: 1, sizeAttenuation: false } );
const buffergeometry = new BufferGeometry();
buffergeometry.setAttribute( 'position', new Float32BufferAttribute( state.vertices, 3 ) );
if ( state.colors.length > 0 && state.colors[ 0 ] !== undefined ) {
buffergeometry.setAttribute( 'color', new Float32BufferAttribute( state.colors, 3 ) );
material.vertexColors = true;
}
const points = new Points( buffergeometry, material );
container.add( points );
}
}
return container;
}
}
export { OBJLoader };

618
node_modules/three/examples/jsm/loaders/PCDLoader.js generated vendored Normal file
View File

@@ -0,0 +1,618 @@
import {
BufferGeometry,
Color,
FileLoader,
Float32BufferAttribute,
Int32BufferAttribute,
Loader,
Points,
PointsMaterial,
SRGBColorSpace
} from 'three';
/**
* A loader for the Point Cloud Data (PCD) format.
*
* PCDLoader supports ASCII and (compressed) binary files as well as the following PCD fields:
* - x y z
* - rgb
* - normal_x normal_y normal_z
* - intensity
* - label
*
* ```js
* const loader = new PCDLoader();
*
* const points = await loader.loadAsync( './models/pcd/binary/Zaghetto.pcd' );
* points.geometry.center(); // optional
* points.geometry.rotateX( Math.PI ); // optional
* scene.add( points );
* ```
*
* @augments Loader
* @three_import import { PCDLoader } from 'three/addons/loaders/PCDLoader.js';
*/
class PCDLoader extends Loader {
/**
* Constructs a new PCD loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
/**
* Whether to use little Endian or not.
*
* @type {boolean}
* @default true
*/
this.littleEndian = true;
}
/**
* Starts loading from the given URL and passes the loaded PCD asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(Points)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( data ) {
try {
onLoad( scope.parse( data ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Get dataview value by field type and size.
*
* @param {DataView} dataview - The DataView to read from.
* @param {number} offset - The offset to start reading from.
* @param {'F' | 'U' | 'I'} type - Field type.
* @param {number} size - Field size.
* @returns {number} Field value.
*/
_getDataView( dataview, offset, type, size ) {
switch ( type ) {
case 'F': {
if ( size === 8 ) {
return dataview.getFloat64( offset, this.littleEndian );
}
return dataview.getFloat32( offset, this.littleEndian );
}
case 'I': {
if ( size === 1 ) {
return dataview.getInt8( offset );
}
if ( size === 2 ) {
return dataview.getInt16( offset, this.littleEndian );
}
return dataview.getInt32( offset, this.littleEndian );
}
case 'U': {
if ( size === 1 ) {
return dataview.getUint8( offset );
}
if ( size === 2 ) {
return dataview.getUint16( offset, this.littleEndian );
}
return dataview.getUint32( offset, this.littleEndian );
}
}
}
/**
* Parses the given PCD data and returns a point cloud.
*
* @param {ArrayBuffer} data - The raw PCD data as an array buffer.
* @return {Points} The parsed point cloud.
*/
parse( data ) {
// from https://gitlab.com/taketwo/three-pcd-loader/blob/master/decompress-lzf.js
function decompressLZF( inData, outLength ) {
const inLength = inData.length;
const outData = new Uint8Array( outLength );
let inPtr = 0;
let outPtr = 0;
let ctrl;
let len;
let ref;
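// LZF stream layout: a control byte below 32 starts a literal run of (ctrl + 1) bytes; any other
// control byte is a back-reference of length (ctrl >> 5) + 2 (an extra length byte follows when the
// 3-bit length field is 7) at a distance built from the low 5 bits plus one additional byte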
do {
ctrl = inData[ inPtr ++ ];
if ( ctrl < ( 1 << 5 ) ) {
ctrl ++;
if ( outPtr + ctrl > outLength ) throw new Error( 'Output buffer is not large enough' );
if ( inPtr + ctrl > inLength ) throw new Error( 'Invalid compressed data' );
do {
outData[ outPtr ++ ] = inData[ inPtr ++ ];
} while ( -- ctrl );
} else {
len = ctrl >> 5;
ref = outPtr - ( ( ctrl & 0x1f ) << 8 ) - 1;
if ( inPtr >= inLength ) throw new Error( 'Invalid compressed data' );
if ( len === 7 ) {
len += inData[ inPtr ++ ];
if ( inPtr >= inLength ) throw new Error( 'Invalid compressed data' );
}
ref -= inData[ inPtr ++ ];
if ( outPtr + len + 2 > outLength ) throw new Error( 'Output buffer is not large enough' );
if ( ref < 0 ) throw new Error( 'Invalid compressed data' );
if ( ref >= outPtr ) throw new Error( 'Invalid compressed data' );
do {
outData[ outPtr ++ ] = outData[ ref ++ ];
} while ( -- len + 2 );
}
} while ( inPtr < inLength );
return outData;
}
function parseHeader( binaryData ) {
const PCDheader = {};
const buffer = new Uint8Array( binaryData );
let data = '', line = '', i = 0, end = false;
const max = buffer.length;
while ( i < max && end === false ) {
const char = String.fromCharCode( buffer[ i ++ ] );
if ( char === '\n' || char === '\r' ) {
if ( line.trim().toLowerCase().startsWith( 'data' ) ) {
end = true;
}
line = '';
} else {
line += char;
}
data += char;
}
const result1 = data.search( /[\r\n]DATA\s(\S*)\s/i );
const result2 = /[\r\n]DATA\s(\S*)\s/i.exec( data.slice( result1 - 1 ) );
PCDheader.data = result2[ 1 ];
PCDheader.headerLen = result2[ 0 ].length + result1;
PCDheader.str = data.slice( 0, PCDheader.headerLen );
// remove comments
PCDheader.str = PCDheader.str.replace( /#.*/gi, '' );
// parse
PCDheader.version = /^VERSION (.*)/im.exec( PCDheader.str );
PCDheader.fields = /^FIELDS (.*)/im.exec( PCDheader.str );
PCDheader.size = /^SIZE (.*)/im.exec( PCDheader.str );
PCDheader.type = /^TYPE (.*)/im.exec( PCDheader.str );
PCDheader.count = /^COUNT (.*)/im.exec( PCDheader.str );
PCDheader.width = /^WIDTH (.*)/im.exec( PCDheader.str );
PCDheader.height = /^HEIGHT (.*)/im.exec( PCDheader.str );
PCDheader.viewpoint = /^VIEWPOINT (.*)/im.exec( PCDheader.str );
PCDheader.points = /^POINTS (.*)/im.exec( PCDheader.str );
// evaluate
if ( PCDheader.version !== null )
PCDheader.version = parseFloat( PCDheader.version[ 1 ] );
PCDheader.fields = ( PCDheader.fields !== null ) ? PCDheader.fields[ 1 ].split( ' ' ) : [];
if ( PCDheader.type !== null )
PCDheader.type = PCDheader.type[ 1 ].split( ' ' );
if ( PCDheader.width !== null )
PCDheader.width = parseInt( PCDheader.width[ 1 ] );
if ( PCDheader.height !== null )
PCDheader.height = parseInt( PCDheader.height[ 1 ] );
if ( PCDheader.viewpoint !== null )
PCDheader.viewpoint = PCDheader.viewpoint[ 1 ];
if ( PCDheader.points !== null )
PCDheader.points = parseInt( PCDheader.points[ 1 ], 10 );
if ( PCDheader.points === null )
PCDheader.points = PCDheader.width * PCDheader.height;
if ( PCDheader.size !== null ) {
PCDheader.size = PCDheader.size[ 1 ].split( ' ' ).map( function ( x ) {
return parseInt( x, 10 );
} );
}
if ( PCDheader.count !== null ) {
PCDheader.count = PCDheader.count[ 1 ].split( ' ' ).map( function ( x ) {
return parseInt( x, 10 );
} );
} else {
PCDheader.count = [];
for ( let i = 0, l = PCDheader.fields.length; i < l; i ++ ) {
PCDheader.count.push( 1 );
}
}
PCDheader.offset = {};
let sizeSum = 0;
for ( let i = 0, l = PCDheader.fields.length; i < l; i ++ ) {
if ( PCDheader.data === 'ascii' ) {
PCDheader.offset[ PCDheader.fields[ i ] ] = i;
} else {
PCDheader.offset[ PCDheader.fields[ i ] ] = sizeSum;
sizeSum += PCDheader.size[ i ] * PCDheader.count[ i ];
}
}
// for binary only
PCDheader.rowSize = sizeSum;
return PCDheader;
}
// parse header
const PCDheader = parseHeader( data );
// parse data
const position = [];
const normal = [];
const color = [];
const intensity = [];
const label = [];
const c = new Color();
// ascii
if ( PCDheader.data === 'ascii' ) {
const offset = PCDheader.offset;
const textData = new TextDecoder().decode( data );
const pcdData = textData.slice( PCDheader.headerLen );
const lines = pcdData.split( '\n' );
for ( let i = 0, l = lines.length; i < l; i ++ ) {
if ( lines[ i ] === '' ) continue;
const line = lines[ i ].split( ' ' );
if ( offset.x !== undefined ) {
position.push( parseFloat( line[ offset.x ] ) );
position.push( parseFloat( line[ offset.y ] ) );
position.push( parseFloat( line[ offset.z ] ) );
}
if ( offset.rgb !== undefined ) {
const rgb_field_index = PCDheader.fields.findIndex( ( field ) => field === 'rgb' );
const rgb_type = PCDheader.type[ rgb_field_index ];
const float = parseFloat( line[ offset.rgb ] );
let rgb = float;
if ( rgb_type === 'F' ) {
// treat float values as int
// https://github.com/daavoo/pyntcloud/pull/204/commits/7b4205e64d5ed09abe708b2e91b615690c24d518
const farr = new Float32Array( 1 );
farr[ 0 ] = float;
rgb = new Int32Array( farr.buffer )[ 0 ];
}
const r = ( ( rgb >> 16 ) & 0x0000ff ) / 255;
const g = ( ( rgb >> 8 ) & 0x0000ff ) / 255;
const b = ( ( rgb >> 0 ) & 0x0000ff ) / 255;
c.setRGB( r, g, b, SRGBColorSpace );
color.push( c.r, c.g, c.b );
}
if ( offset.normal_x !== undefined ) {
normal.push( parseFloat( line[ offset.normal_x ] ) );
normal.push( parseFloat( line[ offset.normal_y ] ) );
normal.push( parseFloat( line[ offset.normal_z ] ) );
}
if ( offset.intensity !== undefined ) {
intensity.push( parseFloat( line[ offset.intensity ] ) );
}
if ( offset.label !== undefined ) {
label.push( parseInt( line[ offset.label ] ) );
}
}
}
// binary-compressed
// normally data in PCD files are organized as array of structures: XYZRGBXYZRGB
// binary compressed PCD files organize their data as structure of arrays: XXYYZZRGBRGB
// that requires a totally different parsing approach compared to non-compressed data
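// e.g. with fields x y z and N points, the decompressed block is laid out as
// x0 x1 ... x(N-1) y0 y1 ... y(N-1) z0 z1 ... z(N-1)
// which is why the reads below scale each field offset by PCDheader.points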
if ( PCDheader.data === 'binary_compressed' ) {
const sizes = new Uint32Array( data.slice( PCDheader.headerLen, PCDheader.headerLen + 8 ) );
const compressedSize = sizes[ 0 ];
const decompressedSize = sizes[ 1 ];
const decompressed = decompressLZF( new Uint8Array( data, PCDheader.headerLen + 8, compressedSize ), decompressedSize );
const dataview = new DataView( decompressed.buffer );
const offset = PCDheader.offset;
for ( let i = 0; i < PCDheader.points; i ++ ) {
if ( offset.x !== undefined ) {
const xIndex = PCDheader.fields.indexOf( 'x' );
const yIndex = PCDheader.fields.indexOf( 'y' );
const zIndex = PCDheader.fields.indexOf( 'z' );
position.push( this._getDataView( dataview, ( PCDheader.points * offset.x ) + PCDheader.size[ xIndex ] * i, PCDheader.type[ xIndex ], PCDheader.size[ xIndex ] ) );
position.push( this._getDataView( dataview, ( PCDheader.points * offset.y ) + PCDheader.size[ yIndex ] * i, PCDheader.type[ yIndex ], PCDheader.size[ yIndex ] ) );
position.push( this._getDataView( dataview, ( PCDheader.points * offset.z ) + PCDheader.size[ zIndex ] * i, PCDheader.type[ zIndex ], PCDheader.size[ zIndex ] ) );
}
if ( offset.rgb !== undefined ) {
const rgbIndex = PCDheader.fields.indexOf( 'rgb' );
const r = dataview.getUint8( ( PCDheader.points * offset.rgb ) + PCDheader.size[ rgbIndex ] * i + 2 ) / 255.0;
const g = dataview.getUint8( ( PCDheader.points * offset.rgb ) + PCDheader.size[ rgbIndex ] * i + 1 ) / 255.0;
const b = dataview.getUint8( ( PCDheader.points * offset.rgb ) + PCDheader.size[ rgbIndex ] * i + 0 ) / 255.0;
c.setRGB( r, g, b, SRGBColorSpace );
color.push( c.r, c.g, c.b );
}
if ( offset.normal_x !== undefined ) {
const xIndex = PCDheader.fields.indexOf( 'normal_x' );
const yIndex = PCDheader.fields.indexOf( 'normal_y' );
const zIndex = PCDheader.fields.indexOf( 'normal_z' );
normal.push( this._getDataView( dataview, ( PCDheader.points * offset.normal_x ) + PCDheader.size[ xIndex ] * i, PCDheader.type[ xIndex ], PCDheader.size[ xIndex ] ) );
normal.push( this._getDataView( dataview, ( PCDheader.points * offset.normal_y ) + PCDheader.size[ yIndex ] * i, PCDheader.type[ yIndex ], PCDheader.size[ yIndex ] ) );
normal.push( this._getDataView( dataview, ( PCDheader.points * offset.normal_z ) + PCDheader.size[ zIndex ] * i, PCDheader.type[ zIndex ], PCDheader.size[ zIndex ] ) );
}
if ( offset.intensity !== undefined ) {
const intensityIndex = PCDheader.fields.indexOf( 'intensity' );
intensity.push( this._getDataView( dataview, ( PCDheader.points * offset.intensity ) + PCDheader.size[ intensityIndex ] * i, PCDheader.type[ intensityIndex ], PCDheader.size[ intensityIndex ] ) );
}
if ( offset.label !== undefined ) {
const labelIndex = PCDheader.fields.indexOf( 'label' );
label.push( dataview.getInt32( ( PCDheader.points * offset.label ) + PCDheader.size[ labelIndex ] * i, this.littleEndian ) );
}
}
}
// binary
if ( PCDheader.data === 'binary' ) {
const dataview = new DataView( data, PCDheader.headerLen );
const offset = PCDheader.offset;
for ( let i = 0, row = 0; i < PCDheader.points; i ++, row += PCDheader.rowSize ) {
if ( offset.x !== undefined ) {
const xIndex = PCDheader.fields.indexOf( 'x' );
const yIndex = PCDheader.fields.indexOf( 'y' );
const zIndex = PCDheader.fields.indexOf( 'z' );
position.push( this._getDataView( dataview, row + offset.x, PCDheader.type[ xIndex ], PCDheader.size[ xIndex ] ) );
position.push( this._getDataView( dataview, row + offset.y, PCDheader.type[ yIndex ], PCDheader.size[ yIndex ] ) );
position.push( this._getDataView( dataview, row + offset.z, PCDheader.type[ zIndex ], PCDheader.size[ zIndex ] ) );
}
if ( offset.rgb !== undefined ) {
const r = dataview.getUint8( row + offset.rgb + 2 ) / 255.0;
const g = dataview.getUint8( row + offset.rgb + 1 ) / 255.0;
const b = dataview.getUint8( row + offset.rgb + 0 ) / 255.0;
c.setRGB( r, g, b, SRGBColorSpace );
color.push( c.r, c.g, c.b );
}
if ( offset.normal_x !== undefined ) {
const xIndex = PCDheader.fields.indexOf( 'normal_x' );
const yIndex = PCDheader.fields.indexOf( 'normal_y' );
const zIndex = PCDheader.fields.indexOf( 'normal_z' );
normal.push( this._getDataView( dataview, row + offset.normal_x, PCDheader.type[ xIndex ], PCDheader.size[ xIndex ] ) );
normal.push( this._getDataView( dataview, row + offset.normal_y, PCDheader.type[ yIndex ], PCDheader.size[ yIndex ] ) );
normal.push( this._getDataView( dataview, row + offset.normal_z, PCDheader.type[ zIndex ], PCDheader.size[ zIndex ] ) );
}
if ( offset.intensity !== undefined ) {
const intensityIndex = PCDheader.fields.indexOf( 'intensity' );
intensity.push( this._getDataView( dataview, row + offset.intensity, PCDheader.type[ intensityIndex ], PCDheader.size[ intensityIndex ] ) );
}
if ( offset.label !== undefined ) {
label.push( dataview.getInt32( row + offset.label, this.littleEndian ) );
}
}
}
// build geometry
const geometry = new BufferGeometry();
if ( position.length > 0 ) geometry.setAttribute( 'position', new Float32BufferAttribute( position, 3 ) );
if ( normal.length > 0 ) geometry.setAttribute( 'normal', new Float32BufferAttribute( normal, 3 ) );
if ( color.length > 0 ) geometry.setAttribute( 'color', new Float32BufferAttribute( color, 3 ) );
if ( intensity.length > 0 ) geometry.setAttribute( 'intensity', new Float32BufferAttribute( intensity, 1 ) );
if ( label.length > 0 ) geometry.setAttribute( 'label', new Int32BufferAttribute( label, 1 ) );
geometry.computeBoundingSphere();
// build material
const material = new PointsMaterial( { size: 0.005 } );
if ( color.length > 0 ) {
material.vertexColors = true;
}
// build point cloud
return new Points( geometry, material );
}
}
export { PCDLoader };

272
node_modules/three/examples/jsm/loaders/PDBLoader.js generated vendored Normal file
View File

@@ -0,0 +1,272 @@
import {
BufferGeometry,
FileLoader,
Float32BufferAttribute,
Loader,
Color,
SRGBColorSpace
} from 'three';
/**
* A loader for the PDB format.
*
* The [Protein Data Bank]{@link https://en.wikipedia.org/wiki/Protein_Data_Bank_(file_format)}
* file format is a textual file describing the three-dimensional structures of molecules.
*
* ```js
* const loader = new PDBLoader();
* const pdb = await loader.loadAsync( 'models/pdb/ethanol.pdb' );
*
* const geometryAtoms = pdb.geometryAtoms;
* const geometryBonds = pdb.geometryBonds;
* const json = pdb.json;
* ```
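*
* A minimal sketch for visualizing the parsed data (material settings are illustrative,
* and a `scene` is assumed as in the other loader examples):
* ```js
* const atoms = new THREE.Points( geometryAtoms, new THREE.PointsMaterial( { vertexColors: true, size: 0.5 } ) );
* const bonds = new THREE.LineSegments( geometryBonds, new THREE.LineBasicMaterial() );
* scene.add( atoms, bonds );
* ```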
*
* @augments Loader
* @three_import import { PDBLoader } from 'three/addons/loaders/PDBLoader.js';
*/
class PDBLoader extends Loader {
/**
* Constructs a new PDB loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
}
/**
* Starts loading from the given URL and passes the loaded PDB asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(Object)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given PDB data and returns an object holding the atoms and
* bond geometries as well as the raw atom data as JSON.
*
* @param {string} text - The raw PDB data as a string.
* @return {Object} The result object.
*/
parse( text ) {
// Based on CanvasMol PDB parser
function trim( text ) {
return text.replace( /^\s\s*/, '' ).replace( /\s\s*$/, '' );
}
function capitalize( text ) {
return text.charAt( 0 ).toUpperCase() + text.slice( 1 ).toLowerCase();
}
function hash( s, e ) {
return 's' + Math.min( s, e ) + 'e' + Math.max( s, e );
}
function parseBond( start, length, satom, i ) {
const eatom = parseInt( lines[ i ].slice( start, start + length ) );
if ( eatom ) {
const h = hash( satom, eatom );
if ( _bhash[ h ] === undefined ) {
_bonds.push( [ satom - 1, eatom - 1, 1 ] );
_bhash[ h ] = _bonds.length - 1;
} else {
// doesn't really work as almost all PDBs
// have just normal bonds appearing multiple
// times instead of being double/triple bonds
// bonds[bhash[h]][2] += 1;
}
}
}
function buildGeometry() {
const build = {
geometryAtoms: new BufferGeometry(),
geometryBonds: new BufferGeometry(),
json: {
atoms: atoms
}
};
const geometryAtoms = build.geometryAtoms;
const geometryBonds = build.geometryBonds;
const verticesAtoms = [];
const colorsAtoms = [];
const verticesBonds = [];
// atoms
const c = new Color();
for ( let i = 0, l = atoms.length; i < l; i ++ ) {
const atom = atoms[ i ];
const x = atom[ 0 ];
const y = atom[ 1 ];
const z = atom[ 2 ];
verticesAtoms.push( x, y, z );
const r = atom[ 3 ][ 0 ] / 255;
const g = atom[ 3 ][ 1 ] / 255;
const b = atom[ 3 ][ 2 ] / 255;
c.setRGB( r, g, b, SRGBColorSpace );
colorsAtoms.push( c.r, c.g, c.b );
}
// bonds
for ( let i = 0, l = _bonds.length; i < l; i ++ ) {
const bond = _bonds[ i ];
const start = bond[ 0 ];
const end = bond[ 1 ];
const startAtom = _atomMap[ start ];
const endAtom = _atomMap[ end ];
let x = startAtom[ 0 ];
let y = startAtom[ 1 ];
let z = startAtom[ 2 ];
verticesBonds.push( x, y, z );
x = endAtom[ 0 ];
y = endAtom[ 1 ];
z = endAtom[ 2 ];
verticesBonds.push( x, y, z );
}
// build geometry
geometryAtoms.setAttribute( 'position', new Float32BufferAttribute( verticesAtoms, 3 ) );
geometryAtoms.setAttribute( 'color', new Float32BufferAttribute( colorsAtoms, 3 ) );
geometryBonds.setAttribute( 'position', new Float32BufferAttribute( verticesBonds, 3 ) );
return build;
}
const CPK = { h: [ 255, 255, 255 ], he: [ 217, 255, 255 ], li: [ 204, 128, 255 ], be: [ 194, 255, 0 ], b: [ 255, 181, 181 ], c: [ 144, 144, 144 ], n: [ 48, 80, 248 ], o: [ 255, 13, 13 ], f: [ 144, 224, 80 ], ne: [ 179, 227, 245 ], na: [ 171, 92, 242 ], mg: [ 138, 255, 0 ], al: [ 191, 166, 166 ], si: [ 240, 200, 160 ], p: [ 255, 128, 0 ], s: [ 255, 255, 48 ], cl: [ 31, 240, 31 ], ar: [ 128, 209, 227 ], k: [ 143, 64, 212 ], ca: [ 61, 255, 0 ], sc: [ 230, 230, 230 ], ti: [ 191, 194, 199 ], v: [ 166, 166, 171 ], cr: [ 138, 153, 199 ], mn: [ 156, 122, 199 ], fe: [ 224, 102, 51 ], co: [ 240, 144, 160 ], ni: [ 80, 208, 80 ], cu: [ 200, 128, 51 ], zn: [ 125, 128, 176 ], ga: [ 194, 143, 143 ], ge: [ 102, 143, 143 ], as: [ 189, 128, 227 ], se: [ 255, 161, 0 ], br: [ 166, 41, 41 ], kr: [ 92, 184, 209 ], rb: [ 112, 46, 176 ], sr: [ 0, 255, 0 ], y: [ 148, 255, 255 ], zr: [ 148, 224, 224 ], nb: [ 115, 194, 201 ], mo: [ 84, 181, 181 ], tc: [ 59, 158, 158 ], ru: [ 36, 143, 143 ], rh: [ 10, 125, 140 ], pd: [ 0, 105, 133 ], ag: [ 192, 192, 192 ], cd: [ 255, 217, 143 ], in: [ 166, 117, 115 ], sn: [ 102, 128, 128 ], sb: [ 158, 99, 181 ], te: [ 212, 122, 0 ], i: [ 148, 0, 148 ], xe: [ 66, 158, 176 ], cs: [ 87, 23, 143 ], ba: [ 0, 201, 0 ], la: [ 112, 212, 255 ], ce: [ 255, 255, 199 ], pr: [ 217, 255, 199 ], nd: [ 199, 255, 199 ], pm: [ 163, 255, 199 ], sm: [ 143, 255, 199 ], eu: [ 97, 255, 199 ], gd: [ 69, 255, 199 ], tb: [ 48, 255, 199 ], dy: [ 31, 255, 199 ], ho: [ 0, 255, 156 ], er: [ 0, 230, 117 ], tm: [ 0, 212, 82 ], yb: [ 0, 191, 56 ], lu: [ 0, 171, 36 ], hf: [ 77, 194, 255 ], ta: [ 77, 166, 255 ], w: [ 33, 148, 214 ], re: [ 38, 125, 171 ], os: [ 38, 102, 150 ], ir: [ 23, 84, 135 ], pt: [ 208, 208, 224 ], au: [ 255, 209, 35 ], hg: [ 184, 184, 208 ], tl: [ 166, 84, 77 ], pb: [ 87, 89, 97 ], bi: [ 158, 79, 181 ], po: [ 171, 92, 0 ], at: [ 117, 79, 69 ], rn: [ 66, 130, 150 ], fr: [ 66, 0, 102 ], ra: [ 0, 125, 0 ], ac: [ 112, 171, 250 ], th: [ 0, 186, 255 ], pa: [ 0, 161, 255 ], u: [ 0, 143, 255 ], np: [ 0, 128, 255 ], pu: [ 0, 107, 255 ], am: [ 84, 92, 242 ], cm: [ 120, 92, 227 ], bk: [ 138, 79, 227 ], cf: [ 161, 54, 212 ], es: [ 179, 31, 212 ], fm: [ 179, 31, 186 ], md: [ 179, 13, 166 ], no: [ 189, 13, 135 ], lr: [ 199, 0, 102 ], rf: [ 204, 0, 89 ], db: [ 209, 0, 79 ], sg: [ 217, 0, 69 ], bh: [ 224, 0, 56 ], hs: [ 230, 0, 46 ], mt: [ 235, 0, 38 ], ds: [ 235, 0, 38 ], rg: [ 235, 0, 38 ], cn: [ 235, 0, 38 ], uut: [ 235, 0, 38 ], uuq: [ 235, 0, 38 ], uup: [ 235, 0, 38 ], uuh: [ 235, 0, 38 ], uus: [ 235, 0, 38 ], uuo: [ 235, 0, 38 ] };
const atoms = [];
const _bonds = [];
const _bhash = {};
const _atomMap = {};
// parse
const lines = text.split( '\n' );
for ( let i = 0, l = lines.length; i < l; i ++ ) {
if ( lines[ i ].slice( 0, 4 ) === 'ATOM' || lines[ i ].slice( 0, 6 ) === 'HETATM' ) {
const x = parseFloat( lines[ i ].slice( 30, 37 ) );
const y = parseFloat( lines[ i ].slice( 38, 45 ) );
const z = parseFloat( lines[ i ].slice( 46, 53 ) );
const index = parseInt( lines[ i ].slice( 6, 11 ) ) - 1;
let e = trim( lines[ i ].slice( 76, 78 ) ).toLowerCase();
if ( e === '' ) {
e = trim( lines[ i ].slice( 12, 14 ) ).toLowerCase();
}
const atomData = [ x, y, z, CPK[ e ], capitalize( e ) ];
atoms.push( atomData );
_atomMap[ index ] = atomData;
} else if ( lines[ i ].slice( 0, 6 ) === 'CONECT' ) {
const satom = parseInt( lines[ i ].slice( 6, 11 ) );
parseBond( 11, 5, satom, i );
parseBond( 16, 5, satom, i );
parseBond( 21, 5, satom, i );
parseBond( 26, 5, satom, i );
}
}
// build and return geometry
return buildGeometry();
}
}
export { PDBLoader };

805
node_modules/three/examples/jsm/loaders/PLYLoader.js generated vendored Normal file
View File

@@ -0,0 +1,805 @@
import {
BufferGeometry,
FileLoader,
Float32BufferAttribute,
Loader,
Color,
SRGBColorSpace
} from 'three';
const _color = new Color();
/**
* A loader for the PLY format (also known as the Polygon
* File Format or the Stanford Triangle Format).
*
* Limitations:
* - ASCII decoding assumes file is UTF-8.
*
* ```js
* const loader = new PLYLoader();
* const geometry = await loader.loadAsync( './models/ply/ascii/dolphins.ply' );
* scene.add( new THREE.Mesh( geometry ) );
* ```
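*
* If the file contains per-vertex colors, they end up in the geometry's `color` attribute.
* A minimal sketch for rendering them (the material choice is illustrative):
* ```js
* const material = new THREE.MeshBasicMaterial( { vertexColors: true } );
* scene.add( new THREE.Mesh( geometry, material ) );
* ```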
*
* @augments Loader
* @three_import import { PLYLoader } from 'three/addons/loaders/PLYLoader.js';
*/
class PLYLoader extends Loader {
/**
* Constructs a new PLY loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
// internals
this.propertyNameMapping = {};
this.customPropertyMapping = {};
}
/**
* Starts loading from the given URL and passes the loaded PLY asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(BufferGeometry)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( this.requestHeader );
loader.setWithCredentials( this.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Sets a property name mapping that maps default property names
* to custom ones. For example, the following maps the properties
* “diffuse_(red|green|blue)” in the file to standard color names.
*
* ```js
* loader.setPropertyNameMapping( {
* diffuse_red: 'red',
* diffuse_green: 'green',
* diffuse_blue: 'blue'
* } );
* ```
*
* @param {Object} mapping - The mapping dictionary.
*/
setPropertyNameMapping( mapping ) {
this.propertyNameMapping = mapping;
}
/**
* Custom properties outside of the defaults for position, uv, normal
* and color attributes can be added using the setCustomPropertyNameMapping method.
* For example, the following maps the element properties “custom_property_a”
* and “custom_property_b” to an attribute “customAttribute” with an item size of 2.
* Attribute item sizes are set from the number of element properties in the property array.
*
* ```js
* loader.setCustomPropertyNameMapping( {
* customAttribute: ['custom_property_a', 'custom_property_b'],
* } );
* ```
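*
* After parsing, the mapped values are available as `geometry.getAttribute( 'customAttribute' )`.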
* @param {Object} mapping - The mapping dictionary.
*/
setCustomPropertyNameMapping( mapping ) {
this.customPropertyMapping = mapping;
}
/**
* Parses the given PLY data and returns the resulting geometry.
*
* @param {ArrayBuffer} data - The raw PLY data as an array buffer.
* @return {BufferGeometry} The parsed geometry.
*/
parse( data ) {
function parseHeader( data, headerLength = 0 ) {
const patternHeader = /^ply([\s\S]*)end_header(\r\n|\r|\n)/;
let headerText = '';
const result = patternHeader.exec( data );
if ( result !== null ) {
headerText = result[ 1 ];
}
const header = {
comments: [],
elements: [],
headerLength: headerLength,
objInfo: ''
};
const lines = headerText.split( /\r\n|\r|\n/ );
let currentElement;
function make_ply_element_property( propertyValues, propertyNameMapping ) {
const property = { type: propertyValues[ 0 ] };
if ( property.type === 'list' ) {
property.name = propertyValues[ 3 ];
property.countType = propertyValues[ 1 ];
property.itemType = propertyValues[ 2 ];
} else {
property.name = propertyValues[ 1 ];
}
if ( property.name in propertyNameMapping ) {
property.name = propertyNameMapping[ property.name ];
}
return property;
}
for ( let i = 0; i < lines.length; i ++ ) {
let line = lines[ i ];
line = line.trim();
if ( line === '' ) continue;
const lineValues = line.split( /\s+/ );
const lineType = lineValues.shift();
line = lineValues.join( ' ' );
switch ( lineType ) {
case 'format':
header.format = lineValues[ 0 ];
header.version = lineValues[ 1 ];
break;
case 'comment':
header.comments.push( line );
break;
case 'element':
if ( currentElement !== undefined ) {
header.elements.push( currentElement );
}
currentElement = {};
currentElement.name = lineValues[ 0 ];
currentElement.count = parseInt( lineValues[ 1 ] );
currentElement.properties = [];
break;
case 'property':
currentElement.properties.push( make_ply_element_property( lineValues, scope.propertyNameMapping ) );
break;
case 'obj_info':
header.objInfo = line;
break;
default:
console.log( 'unhandled', lineType, lineValues );
}
}
if ( currentElement !== undefined ) {
header.elements.push( currentElement );
}
return header;
}
function parseASCIINumber( n, type ) {
switch ( type ) {
case 'char': case 'uchar': case 'short': case 'ushort': case 'int': case 'uint':
case 'int8': case 'uint8': case 'int16': case 'uint16': case 'int32': case 'uint32':
return parseInt( n );
case 'float': case 'double': case 'float32': case 'float64':
return parseFloat( n );
}
}
function parseASCIIElement( properties, tokens ) {
const element = {};
for ( let i = 0; i < properties.length; i ++ ) {
if ( tokens.empty() ) return null;
if ( properties[ i ].type === 'list' ) {
const list = [];
const n = parseASCIINumber( tokens.next(), properties[ i ].countType );
for ( let j = 0; j < n; j ++ ) {
if ( tokens.empty() ) return null;
list.push( parseASCIINumber( tokens.next(), properties[ i ].itemType ) );
}
element[ properties[ i ].name ] = list;
} else {
element[ properties[ i ].name ] = parseASCIINumber( tokens.next(), properties[ i ].type );
}
}
return element;
}
function createBuffer() {
const buffer = {
indices: [],
vertices: [],
normals: [],
uvs: [],
faceVertexUvs: [],
colors: [],
faceVertexColors: []
};
for ( const customProperty of Object.keys( scope.customPropertyMapping ) ) {
buffer[ customProperty ] = [];
}
return buffer;
}
function mapElementAttributes( properties ) {
const elementNames = properties.map( property => {
return property.name;
} );
function findAttrName( names ) {
for ( let i = 0, l = names.length; i < l; i ++ ) {
const name = names[ i ];
if ( elementNames.includes( name ) ) return name;
}
return null;
}
return {
attrX: findAttrName( [ 'x', 'px', 'posx' ] ) || 'x',
attrY: findAttrName( [ 'y', 'py', 'posy' ] ) || 'y',
attrZ: findAttrName( [ 'z', 'pz', 'posz' ] ) || 'z',
attrNX: findAttrName( [ 'nx', 'normalx' ] ),
attrNY: findAttrName( [ 'ny', 'normaly' ] ),
attrNZ: findAttrName( [ 'nz', 'normalz' ] ),
attrS: findAttrName( [ 's', 'u', 'texture_u', 'tx' ] ),
attrT: findAttrName( [ 't', 'v', 'texture_v', 'ty' ] ),
attrR: findAttrName( [ 'red', 'diffuse_red', 'r', 'diffuse_r' ] ),
attrG: findAttrName( [ 'green', 'diffuse_green', 'g', 'diffuse_g' ] ),
attrB: findAttrName( [ 'blue', 'diffuse_blue', 'b', 'diffuse_b' ] ),
};
}
function parseASCII( data, header ) {
// PLY ascii format specification, as per http://en.wikipedia.org/wiki/PLY_(file_format)
const buffer = createBuffer();
const patternBody = /end_header\s+(\S[\s\S]*\S|\S)\s*$/;
let body, matches;
if ( ( matches = patternBody.exec( data ) ) !== null ) {
body = matches[ 1 ].split( /\s+/ );
} else {
body = [ ];
}
const tokens = new ArrayStream( body );
loop: for ( let i = 0; i < header.elements.length; i ++ ) {
const elementDesc = header.elements[ i ];
const attributeMap = mapElementAttributes( elementDesc.properties );
for ( let j = 0; j < elementDesc.count; j ++ ) {
const element = parseASCIIElement( elementDesc.properties, tokens );
if ( ! element ) break loop;
handleElement( buffer, elementDesc.name, element, attributeMap );
}
}
return postProcess( buffer );
}
function postProcess( buffer ) {
let geometry = new BufferGeometry();
// mandatory buffer data
if ( buffer.indices.length > 0 ) {
geometry.setIndex( buffer.indices );
}
geometry.setAttribute( 'position', new Float32BufferAttribute( buffer.vertices, 3 ) );
// optional buffer data
if ( buffer.normals.length > 0 ) {
geometry.setAttribute( 'normal', new Float32BufferAttribute( buffer.normals, 3 ) );
}
if ( buffer.uvs.length > 0 ) {
geometry.setAttribute( 'uv', new Float32BufferAttribute( buffer.uvs, 2 ) );
}
if ( buffer.colors.length > 0 ) {
geometry.setAttribute( 'color', new Float32BufferAttribute( buffer.colors, 3 ) );
}
if ( buffer.faceVertexUvs.length > 0 || buffer.faceVertexColors.length > 0 ) {
geometry = geometry.toNonIndexed();
if ( buffer.faceVertexUvs.length > 0 ) geometry.setAttribute( 'uv', new Float32BufferAttribute( buffer.faceVertexUvs, 2 ) );
if ( buffer.faceVertexColors.length > 0 ) geometry.setAttribute( 'color', new Float32BufferAttribute( buffer.faceVertexColors, 3 ) );
}
// custom buffer data
for ( const customProperty of Object.keys( scope.customPropertyMapping ) ) {
if ( buffer[ customProperty ].length > 0 ) {
geometry.setAttribute(
customProperty,
new Float32BufferAttribute(
buffer[ customProperty ],
scope.customPropertyMapping[ customProperty ].length
)
);
}
}
geometry.computeBoundingSphere();
return geometry;
}
function handleElement( buffer, elementName, element, cacheEntry ) {
if ( elementName === 'vertex' ) {
buffer.vertices.push( element[ cacheEntry.attrX ], element[ cacheEntry.attrY ], element[ cacheEntry.attrZ ] );
if ( cacheEntry.attrNX !== null && cacheEntry.attrNY !== null && cacheEntry.attrNZ !== null ) {
buffer.normals.push( element[ cacheEntry.attrNX ], element[ cacheEntry.attrNY ], element[ cacheEntry.attrNZ ] );
}
if ( cacheEntry.attrS !== null && cacheEntry.attrT !== null ) {
buffer.uvs.push( element[ cacheEntry.attrS ], element[ cacheEntry.attrT ] );
}
if ( cacheEntry.attrR !== null && cacheEntry.attrG !== null && cacheEntry.attrB !== null ) {
_color.setRGB(
element[ cacheEntry.attrR ] / 255.0,
element[ cacheEntry.attrG ] / 255.0,
element[ cacheEntry.attrB ] / 255.0,
SRGBColorSpace
);
buffer.colors.push( _color.r, _color.g, _color.b );
}
for ( const customProperty of Object.keys( scope.customPropertyMapping ) ) {
for ( const elementProperty of scope.customPropertyMapping[ customProperty ] ) {
buffer[ customProperty ].push( element[ elementProperty ] );
}
}
} else if ( elementName === 'face' ) {
const vertex_indices = element.vertex_indices || element.vertex_index; // issue #9338
const texcoord = element.texcoord;
if ( vertex_indices.length === 3 ) {
buffer.indices.push( vertex_indices[ 0 ], vertex_indices[ 1 ], vertex_indices[ 2 ] );
if ( texcoord && texcoord.length === 6 ) {
buffer.faceVertexUvs.push( texcoord[ 0 ], texcoord[ 1 ] );
buffer.faceVertexUvs.push( texcoord[ 2 ], texcoord[ 3 ] );
buffer.faceVertexUvs.push( texcoord[ 4 ], texcoord[ 5 ] );
}
} else if ( vertex_indices.length === 4 ) {
buffer.indices.push( vertex_indices[ 0 ], vertex_indices[ 1 ], vertex_indices[ 3 ] );
buffer.indices.push( vertex_indices[ 1 ], vertex_indices[ 2 ], vertex_indices[ 3 ] );
}
// face colors
if ( cacheEntry.attrR !== null && cacheEntry.attrG !== null && cacheEntry.attrB !== null ) {
_color.setRGB(
element[ cacheEntry.attrR ] / 255.0,
element[ cacheEntry.attrG ] / 255.0,
element[ cacheEntry.attrB ] / 255.0,
SRGBColorSpace
);
buffer.faceVertexColors.push( _color.r, _color.g, _color.b );
buffer.faceVertexColors.push( _color.r, _color.g, _color.b );
buffer.faceVertexColors.push( _color.r, _color.g, _color.b );
}
}
}
function binaryReadElement( at, properties ) {
const element = {};
let read = 0;
for ( let i = 0; i < properties.length; i ++ ) {
const property = properties[ i ];
const valueReader = property.valueReader;
if ( property.type === 'list' ) {
const list = [];
const n = property.countReader.read( at + read );
read += property.countReader.size;
for ( let j = 0; j < n; j ++ ) {
list.push( valueReader.read( at + read ) );
read += valueReader.size;
}
element[ property.name ] = list;
} else {
element[ property.name ] = valueReader.read( at + read );
read += valueReader.size;
}
}
return [ element, read ];
}
function setPropertyBinaryReaders( properties, body, little_endian ) {
function getBinaryReader( dataview, type, little_endian ) {
switch ( type ) {
// correspondences for non-specific length types here match rply:
case 'int8': case 'char': return { read: ( at ) => {
return dataview.getInt8( at );
}, size: 1 };
case 'uint8': case 'uchar': return { read: ( at ) => {
return dataview.getUint8( at );
}, size: 1 };
case 'int16': case 'short': return { read: ( at ) => {
return dataview.getInt16( at, little_endian );
}, size: 2 };
case 'uint16': case 'ushort': return { read: ( at ) => {
return dataview.getUint16( at, little_endian );
}, size: 2 };
case 'int32': case 'int': return { read: ( at ) => {
return dataview.getInt32( at, little_endian );
}, size: 4 };
case 'uint32': case 'uint': return { read: ( at ) => {
return dataview.getUint32( at, little_endian );
}, size: 4 };
case 'float32': case 'float': return { read: ( at ) => {
return dataview.getFloat32( at, little_endian );
}, size: 4 };
case 'float64': case 'double': return { read: ( at ) => {
return dataview.getFloat64( at, little_endian );
}, size: 8 };
}
}
for ( let i = 0, l = properties.length; i < l; i ++ ) {
const property = properties[ i ];
if ( property.type === 'list' ) {
property.countReader = getBinaryReader( body, property.countType, little_endian );
property.valueReader = getBinaryReader( body, property.itemType, little_endian );
} else {
property.valueReader = getBinaryReader( body, property.type, little_endian );
}
}
}
function parseBinary( data, header ) {
const buffer = createBuffer();
const little_endian = ( header.format === 'binary_little_endian' );
const body = new DataView( data, header.headerLength );
let result, loc = 0;
for ( let currentElement = 0; currentElement < header.elements.length; currentElement ++ ) {
const elementDesc = header.elements[ currentElement ];
const properties = elementDesc.properties;
const attributeMap = mapElementAttributes( properties );
setPropertyBinaryReaders( properties, body, little_endian );
for ( let currentElementCount = 0; currentElementCount < elementDesc.count; currentElementCount ++ ) {
result = binaryReadElement( loc, properties );
loc += result[ 1 ];
const element = result[ 0 ];
handleElement( buffer, elementDesc.name, element, attributeMap );
}
}
return postProcess( buffer );
}
function extractHeaderText( bytes ) {
let i = 0;
let cont = true;
let line = '';
const lines = [];
const startLine = new TextDecoder().decode( bytes.subarray( 0, 5 ) );
const hasCRNL = /^ply\r\n/.test( startLine );
do {
const c = String.fromCharCode( bytes[ i ++ ] );
if ( c !== '\n' && c !== '\r' ) {
line += c;
} else {
if ( line === 'end_header' ) cont = false;
if ( line !== '' ) {
lines.push( line );
line = '';
}
}
} while ( cont && i < bytes.length );
// ascii section using \r\n as line endings
if ( hasCRNL === true ) i ++;
return { headerText: lines.join( '\r' ) + '\r', headerLength: i };
}
//
let geometry;
const scope = this;
if ( data instanceof ArrayBuffer ) {
const bytes = new Uint8Array( data );
const { headerText, headerLength } = extractHeaderText( bytes );
const header = parseHeader( headerText, headerLength );
if ( header.format === 'ascii' ) {
const text = new TextDecoder().decode( bytes );
geometry = parseASCII( text, header );
} else {
geometry = parseBinary( data, header );
}
} else {
geometry = parseASCII( data, parseHeader( data ) );
}
return geometry;
}
}
class ArrayStream {
constructor( arr ) {
this.arr = arr;
this.i = 0;
}
empty() {
return this.i >= this.arr.length;
}
next() {
return this.arr[ this.i ++ ];
}
}
export { PLYLoader };

270
node_modules/three/examples/jsm/loaders/PVRLoader.js generated vendored Normal file
View File

@@ -0,0 +1,270 @@
import {
CompressedTextureLoader,
RGBA_PVRTC_2BPPV1_Format,
RGBA_PVRTC_4BPPV1_Format,
RGB_PVRTC_2BPPV1_Format,
RGB_PVRTC_4BPPV1_Format
} from 'three';
/**
* A loader for the PVRTC texture compression format.
*
* ```js
* const loader = new PVRLoader();
*
* const map = loader.load( 'textures/compressed/disturb_4bpp_rgb.pvr' );
* map.colorSpace = THREE.SRGBColorSpace; // only for color textures
* ```
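*
* The returned compressed texture can be assigned to a material like any other texture,
* e.g. (a minimal sketch):
* ```js
* const material = new THREE.MeshBasicMaterial( { map: map } );
* ```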
*
* @augments CompressedTextureLoader
* @three_import import { PVRLoader } from 'three/addons/loaders/PVRLoader.js';
*/
class PVRLoader extends CompressedTextureLoader {
/**
* Constructs a new PVR loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
}
/**
* Parses the given PVRTC texture data.
*
* @param {ArrayBuffer} buffer - The raw texture data.
* @param {boolean} loadMipmaps - Whether to load mipmaps or not. This option is not yet supported by the loader.
* @return {CompressedTextureLoader~TexData} An object representing the parsed texture data.
*/
parse( buffer, loadMipmaps ) {
const headerLengthInt = 13;
const header = new Uint32Array( buffer, 0, headerLengthInt );
const pvrDatas = {
buffer: buffer,
header: header,
loadMipmaps: loadMipmaps
};
if ( header[ 0 ] === 0x03525650 ) {
// PVR v3
return _parseV3( pvrDatas );
} else if ( header[ 11 ] === 0x21525650 ) {
// PVR v2
return _parseV2( pvrDatas );
} else {
console.error( 'THREE.PVRLoader: Unknown PVR format.' );
}
}
}
function _parseV3( pvrDatas ) {
const header = pvrDatas.header;
let bpp, format;
const metaLen = header[ 12 ],
pixelFormat = header[ 2 ],
height = header[ 6 ],
width = header[ 7 ],
// numSurfs = header[ 9 ],
numFaces = header[ 10 ],
numMipmaps = header[ 11 ];
switch ( pixelFormat ) {
case 0 : // PVRTC 2bpp RGB
bpp = 2;
format = RGB_PVRTC_2BPPV1_Format;
break;
case 1 : // PVRTC 2bpp RGBA
bpp = 2;
format = RGBA_PVRTC_2BPPV1_Format;
break;
case 2 : // PVRTC 4bpp RGB
bpp = 4;
format = RGB_PVRTC_4BPPV1_Format;
break;
case 3 : // PVRTC 4bpp RGBA
bpp = 4;
format = RGBA_PVRTC_4BPPV1_Format;
break;
default :
console.error( 'THREE.PVRLoader: Unsupported PVR format:', pixelFormat );
}
pvrDatas.dataPtr = 52 + metaLen;
pvrDatas.bpp = bpp;
pvrDatas.format = format;
pvrDatas.width = width;
pvrDatas.height = height;
pvrDatas.numSurfaces = numFaces;
pvrDatas.numMipmaps = numMipmaps;
pvrDatas.isCubemap = ( numFaces === 6 );
return _extract( pvrDatas );
}
function _parseV2( pvrDatas ) {
const header = pvrDatas.header;
const headerLength = header[ 0 ],
height = header[ 1 ],
width = header[ 2 ],
numMipmaps = header[ 3 ],
flags = header[ 4 ],
// dataLength = header[ 5 ],
// bpp = header[ 6 ],
// bitmaskRed = header[ 7 ],
// bitmaskGreen = header[ 8 ],
// bitmaskBlue = header[ 9 ],
bitmaskAlpha = header[ 10 ],
// pvrTag = header[ 11 ],
numSurfs = header[ 12 ];
const TYPE_MASK = 0xff;
const PVRTC_2 = 24,
PVRTC_4 = 25;
const formatFlags = flags & TYPE_MASK;
let bpp, format;
const _hasAlpha = bitmaskAlpha > 0;
if ( formatFlags === PVRTC_4 ) {
format = _hasAlpha ? RGBA_PVRTC_4BPPV1_Format : RGB_PVRTC_4BPPV1_Format;
bpp = 4;
} else if ( formatFlags === PVRTC_2 ) {
format = _hasAlpha ? RGBA_PVRTC_2BPPV1_Format : RGB_PVRTC_2BPPV1_Format;
bpp = 2;
} else {
console.error( 'THREE.PVRLoader: Unknown PVR format:', formatFlags );
}
pvrDatas.dataPtr = headerLength;
pvrDatas.bpp = bpp;
pvrDatas.format = format;
pvrDatas.width = width;
pvrDatas.height = height;
pvrDatas.numSurfaces = numSurfs;
pvrDatas.numMipmaps = numMipmaps + 1;
// guessing the cubemap type is tricky in v2:
// it's just a PVR containing 6 surfaces (there is no explicit cubemap flag)
pvrDatas.isCubemap = ( numSurfs === 6 );
return _extract( pvrDatas );
}
function _extract( pvrDatas ) {
const pvr = {
mipmaps: [],
width: pvrDatas.width,
height: pvrDatas.height,
format: pvrDatas.format,
mipmapCount: pvrDatas.numMipmaps,
isCubemap: pvrDatas.isCubemap
};
const buffer = pvrDatas.buffer;
let dataOffset = pvrDatas.dataPtr,
dataSize = 0,
blockSize = 0,
blockWidth = 0,
blockHeight = 0,
widthBlocks = 0,
heightBlocks = 0;
const bpp = pvrDatas.bpp,
numSurfs = pvrDatas.numSurfaces;
if ( bpp === 2 ) {
blockWidth = 8;
blockHeight = 4;
} else {
blockWidth = 4;
blockHeight = 4;
}
blockSize = ( blockWidth * blockHeight ) * bpp / 8;
pvr.mipmaps.length = pvrDatas.numMipmaps * numSurfs;
let mipLevel = 0;
while ( mipLevel < pvrDatas.numMipmaps ) {
const sWidth = pvrDatas.width >> mipLevel,
sHeight = pvrDatas.height >> mipLevel;
widthBlocks = sWidth / blockWidth;
heightBlocks = sHeight / blockHeight;
// Clamp to minimum number of blocks
if ( widthBlocks < 2 ) widthBlocks = 2;
if ( heightBlocks < 2 ) heightBlocks = 2;
dataSize = widthBlocks * heightBlocks * blockSize;
for ( let surfIndex = 0; surfIndex < numSurfs; surfIndex ++ ) {
const byteArray = new Uint8Array( buffer, dataOffset, dataSize );
const mipmap = {
data: byteArray,
width: sWidth,
height: sHeight
};
pvr.mipmaps[ surfIndex * pvrDatas.numMipmaps + mipLevel ] = mipmap;
dataOffset += dataSize;
}
mipLevel ++;
}
return pvr;
}
export { PVRLoader };

18
node_modules/three/examples/jsm/loaders/RGBELoader.js generated vendored Normal file
View File

@@ -0,0 +1,18 @@
import { HDRLoader } from './HDRLoader.js';
// @deprecated, r180
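// RGBELoader is now just a thin alias: new code should construct `new HDRLoader()` with the same arguments.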
class RGBELoader extends HDRLoader {
constructor( manager ) {
console.warn( 'RGBELoader has been deprecated. Please use HDRLoader instead.' );
super( manager );
}
}
export { RGBELoader };

421
node_modules/three/examples/jsm/loaders/STLLoader.js generated vendored Normal file
View File

@@ -0,0 +1,421 @@
import {
BufferAttribute,
BufferGeometry,
Color,
FileLoader,
Float32BufferAttribute,
Loader,
Vector3,
SRGBColorSpace
} from 'three';
/**
* A loader for the STL format, as created by Solidworks and other CAD programs.
*
* Supports both binary and ASCII encoded files. The loader returns a non-indexed buffer geometry.
*
* Limitations:
* - Binary decoding supports "Magics" color format (http://en.wikipedia.org/wiki/STL_(file_format)#Color_in_binary_STL).
* - Binary decoding always assumes little-endian byte order, which may not hold for every file.
* - ASCII decoding assumes file is UTF-8.
*
* ```js
* const loader = new STLLoader();
* const geometry = await loader.loadAsync( './models/stl/slotted_disk.stl' )
* scene.add( new THREE.Mesh( geometry ) );
* ```
* For binary STLs, the geometry might contain per-vertex colors. To use them:
* ```js
* // use the same code to load STL as above
* if ( geometry.hasColors ) {
* material = new THREE.MeshPhongMaterial( { opacity: geometry.alpha, vertexColors: true } );
* }
* const mesh = new THREE.Mesh( geometry, material );
* ```
* For ASCII STLs containing multiple solids, each solid is assigned to a different group.
* Groups can be used to assign a different color to each solid by defining an array of materials with the same length as
* geometry.groups and passing it to the Mesh constructor:
*
* ```js
* const materials = [];
* const nGeometryGroups = geometry.groups.length;
*
* for ( let i = 0; i < nGeometryGroups; i ++ ) {
* const material = new THREE.MeshPhongMaterial( { color: colorMap[ i ], wireframe: false } );
* materials.push( material );
* }
*
* const mesh = new THREE.Mesh(geometry, materials);
* ```
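*
* The name of each solid is stored in `geometry.userData.groupNames`, in the same order as `geometry.groups`.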
*
* @augments Loader
* @three_import import { STLLoader } from 'three/addons/loaders/STLLoader.js';
*/
class STLLoader extends Loader {
/**
* Constructs a new STL loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
}
/**
* Starts loading from the given URL and passes the loaded STL asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(BufferGeometry)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( this.requestHeader );
loader.setWithCredentials( this.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given STL data and returns the resulting geometry.
*
* @param {ArrayBuffer} data - The raw STL data as an array buffer.
* @return {BufferGeometry} The parsed geometry.
*/
parse( data ) {
function isBinary( data ) {
const reader = new DataView( data );
const face_size = ( 32 / 8 * 3 ) + ( ( 32 / 8 * 3 ) * 3 ) + ( 16 / 8 );
const n_faces = reader.getUint32( 80, true );
const expect = 80 + ( 32 / 8 ) + ( n_faces * face_size );
if ( expect === reader.byteLength ) {
return true;
}
// ASCII STL data must begin with 'solid ' as the first six bytes.
// However, ASCII STLs lacking the SPACE after the 'd' are known to be
// plentiful. So, check the first 5 bytes for 'solid'.
// Several encodings, such as UTF-8, precede the text with up to 5 bytes:
// https://en.wikipedia.org/wiki/Byte_order_mark#Byte_order_marks_by_encoding
// Search for "solid" to start anywhere after those prefixes.
// US-ASCII ordinal values for 's', 'o', 'l', 'i', 'd'
const solid = [ 115, 111, 108, 105, 100 ];
for ( let off = 0; off < 5; off ++ ) {
// If "solid" text is matched to the current offset, declare it to be an ASCII STL.
if ( matchDataViewAt( solid, reader, off ) ) return false;
}
// Couldn't find "solid" text at the beginning; it is binary STL.
return true;
}
function matchDataViewAt( query, reader, offset ) {
// Check if each byte in query matches the corresponding byte from the current offset
for ( let i = 0, il = query.length; i < il; i ++ ) {
if ( query[ i ] !== reader.getUint8( offset + i ) ) return false;
}
return true;
}
function parseBinary( data ) {
const reader = new DataView( data );
const faces = reader.getUint32( 80, true );
let r, g, b, hasColors = false, colors;
let defaultR, defaultG, defaultB, alpha;
// process STL header
// check for default color in header ("COLOR=rgba" sequence).
for ( let index = 0; index < 80 - 10; index ++ ) {
if ( ( reader.getUint32( index, false ) == 0x434F4C4F /*COLO*/ ) &&
( reader.getUint8( index + 4 ) == 0x52 /*'R'*/ ) &&
( reader.getUint8( index + 5 ) == 0x3D /*'='*/ ) ) {
hasColors = true;
colors = new Float32Array( faces * 3 * 3 );
defaultR = reader.getUint8( index + 6 ) / 255;
defaultG = reader.getUint8( index + 7 ) / 255;
defaultB = reader.getUint8( index + 8 ) / 255;
alpha = reader.getUint8( index + 9 ) / 255;
}
}
const dataOffset = 84;
const faceLength = 12 * 4 + 2;
const geometry = new BufferGeometry();
const vertices = new Float32Array( faces * 3 * 3 );
const normals = new Float32Array( faces * 3 * 3 );
const color = new Color();
for ( let face = 0; face < faces; face ++ ) {
const start = dataOffset + face * faceLength;
const normalX = reader.getFloat32( start, true );
const normalY = reader.getFloat32( start + 4, true );
const normalZ = reader.getFloat32( start + 8, true );
if ( hasColors ) {
const packedColor = reader.getUint16( start + 48, true );
if ( ( packedColor & 0x8000 ) === 0 ) {
// facet has its own unique color
r = ( packedColor & 0x1F ) / 31;
g = ( ( packedColor >> 5 ) & 0x1F ) / 31;
b = ( ( packedColor >> 10 ) & 0x1F ) / 31;
} else {
r = defaultR;
g = defaultG;
b = defaultB;
}
}
for ( let i = 1; i <= 3; i ++ ) {
const vertexstart = start + i * 12;
const componentIdx = ( face * 3 * 3 ) + ( ( i - 1 ) * 3 );
vertices[ componentIdx ] = reader.getFloat32( vertexstart, true );
vertices[ componentIdx + 1 ] = reader.getFloat32( vertexstart + 4, true );
vertices[ componentIdx + 2 ] = reader.getFloat32( vertexstart + 8, true );
normals[ componentIdx ] = normalX;
normals[ componentIdx + 1 ] = normalY;
normals[ componentIdx + 2 ] = normalZ;
if ( hasColors ) {
color.setRGB( r, g, b, SRGBColorSpace );
colors[ componentIdx ] = color.r;
colors[ componentIdx + 1 ] = color.g;
colors[ componentIdx + 2 ] = color.b;
}
}
}
geometry.setAttribute( 'position', new BufferAttribute( vertices, 3 ) );
geometry.setAttribute( 'normal', new BufferAttribute( normals, 3 ) );
if ( hasColors ) {
geometry.setAttribute( 'color', new BufferAttribute( colors, 3 ) );
geometry.hasColors = true;
geometry.alpha = alpha;
}
return geometry;
}
function parseASCII( data ) {
const geometry = new BufferGeometry();
const patternSolid = /solid([\s\S]*?)endsolid/g;
const patternFace = /facet([\s\S]*?)endfacet/g;
const patternName = /solid\s(.+)/;
let faceCounter = 0;
const patternFloat = /[\s]+([+-]?(?:\d*)(?:\.\d*)?(?:[eE][+-]?\d+)?)/.source;
const patternVertex = new RegExp( 'vertex' + patternFloat + patternFloat + patternFloat, 'g' );
const patternNormal = new RegExp( 'normal' + patternFloat + patternFloat + patternFloat, 'g' );
const vertices = [];
const normals = [];
const groupNames = [];
const normal = new Vector3();
let result;
let groupCount = 0;
let startVertex = 0;
let endVertex = 0;
while ( ( result = patternSolid.exec( data ) ) !== null ) {
startVertex = endVertex;
const solid = result[ 0 ];
const name = ( result = patternName.exec( solid ) ) !== null ? result[ 1 ] : '';
groupNames.push( name );
while ( ( result = patternFace.exec( solid ) ) !== null ) {
let vertexCountPerFace = 0;
let normalCountPerFace = 0;
const text = result[ 0 ];
while ( ( result = patternNormal.exec( text ) ) !== null ) {
normal.x = parseFloat( result[ 1 ] );
normal.y = parseFloat( result[ 2 ] );
normal.z = parseFloat( result[ 3 ] );
normalCountPerFace ++;
}
while ( ( result = patternVertex.exec( text ) ) !== null ) {
vertices.push( parseFloat( result[ 1 ] ), parseFloat( result[ 2 ] ), parseFloat( result[ 3 ] ) );
normals.push( normal.x, normal.y, normal.z );
vertexCountPerFace ++;
endVertex ++;
}
// every face has to have exactly ONE valid normal
if ( normalCountPerFace !== 1 ) {
console.error( 'THREE.STLLoader: Something isn\'t right with the normal of face number ' + faceCounter );
}
// each face has to have exactly THREE valid vertices
if ( vertexCountPerFace !== 3 ) {
console.error( 'THREE.STLLoader: Something isn\'t right with the vertices of face number ' + faceCounter );
}
faceCounter ++;
}
const start = startVertex;
const count = endVertex - startVertex;
geometry.userData.groupNames = groupNames;
geometry.addGroup( start, count, groupCount );
groupCount ++;
}
geometry.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) );
geometry.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) );
return geometry;
}
function ensureString( buffer ) {
if ( typeof buffer !== 'string' ) {
return new TextDecoder().decode( buffer );
}
return buffer;
}
function ensureBinary( buffer ) {
if ( typeof buffer === 'string' ) {
const array_buffer = new Uint8Array( buffer.length );
for ( let i = 0; i < buffer.length; i ++ ) {
array_buffer[ i ] = buffer.charCodeAt( i ) & 0xff; // implicitly assumes little-endian
}
return array_buffer.buffer || array_buffer;
} else {
return buffer;
}
}
// start
const binData = ensureBinary( data );
return isBinary( binData ) ? parseBinary( binData ) : parseASCII( ensureString( data ) );
}
}
export { STLLoader };

3267
node_modules/three/examples/jsm/loaders/SVGLoader.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

1144
node_modules/three/examples/jsm/loaders/TDSLoader.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

540
node_modules/three/examples/jsm/loaders/TGALoader.js generated vendored Normal file
View File

@@ -0,0 +1,540 @@
import {
DataTextureLoader,
LinearMipmapLinearFilter
} from 'three';
/**
* A loader for the TGA texture format.
*
* ```js
* const loader = new TGALoader();
* const texture = await loader.loadAsync( 'textures/crate_color8.tga' );
* texture.colorSpace = THREE.SRGBColorSpace; // only for color textures
* ```
*
* @augments DataTextureLoader
* @three_import import { TGALoader } from 'three/addons/loaders/TGALoader.js';
*/
class TGALoader extends DataTextureLoader {
/**
* Constructs a new TGA loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
}
/**
* Parses the given TGA texture data.
*
* @param {ArrayBuffer} buffer - The raw texture data.
* @return {DataTextureLoader~TexData} An object representing the parsed texture data.
*/
parse( buffer ) {
// reference from vthibault, https://github.com/vthibault/roBrowser/blob/master/src/Loaders/Targa.js
function tgaCheckHeader( header ) {
switch ( header.image_type ) {
// check indexed type
case TGA_TYPE_INDEXED:
case TGA_TYPE_RLE_INDEXED:
if ( header.colormap_length > 256 || header.colormap_size !== 24 || header.colormap_type !== 1 ) {
throw new Error( 'THREE.TGALoader: Invalid type colormap data for indexed type.' );
}
break;
// check colormap type
case TGA_TYPE_RGB:
case TGA_TYPE_GREY:
case TGA_TYPE_RLE_RGB:
case TGA_TYPE_RLE_GREY:
if ( header.colormap_type ) {
throw new Error( 'THREE.TGALoader: Invalid type colormap data for colormap type.' );
}
break;
// What is the point of a file without data?
case TGA_TYPE_NO_DATA:
throw new Error( 'THREE.TGALoader: No data.' );
// Invalid type?
default:
throw new Error( 'THREE.TGALoader: Invalid type ' + header.image_type );
}
// check image width and height
if ( header.width <= 0 || header.height <= 0 ) {
throw new Error( 'THREE.TGALoader: Invalid image size.' );
}
// check image pixel size
if ( header.pixel_size !== 8 && header.pixel_size !== 16 &&
header.pixel_size !== 24 && header.pixel_size !== 32 ) {
throw new Error( 'THREE.TGALoader: Invalid pixel size ' + header.pixel_size );
}
}
// parse tga image buffer
function tgaParse( use_rle, use_pal, header, offset, data ) {
let pixel_data,
palettes;
const pixel_size = header.pixel_size >> 3;
const pixel_total = header.width * header.height * pixel_size;
// read palettes
if ( use_pal ) {
palettes = data.subarray( offset, offset += header.colormap_length * ( header.colormap_size >> 3 ) );
}
// read RLE
if ( use_rle ) {
pixel_data = new Uint8Array( pixel_total );
let c, count, i;
let shift = 0;
const pixels = new Uint8Array( pixel_size );
while ( shift < pixel_total ) {
c = data[ offset ++ ];
count = ( c & 0x7f ) + 1;
// RLE pixels
if ( c & 0x80 ) {
// bind pixel tmp array
for ( i = 0; i < pixel_size; ++ i ) {
pixels[ i ] = data[ offset ++ ];
}
// copy pixel array
for ( i = 0; i < count; ++ i ) {
pixel_data.set( pixels, shift + i * pixel_size );
}
shift += pixel_size * count;
} else {
// raw pixels
count *= pixel_size;
for ( i = 0; i < count; ++ i ) {
pixel_data[ shift + i ] = data[ offset ++ ];
}
shift += count;
}
}
} else {
// raw pixels
pixel_data = data.subarray(
offset, offset += ( use_pal ? header.width * header.height : pixel_total )
);
}
return {
pixel_data: pixel_data,
palettes: palettes
};
}
function tgaGetImageData8bits( imageData, y_start, y_step, y_end, x_start, x_step, x_end, image, palettes ) {
const colormap = palettes;
let color, i = 0, x, y;
const width = header.width;
for ( y = y_start; y !== y_end; y += y_step ) {
for ( x = x_start; x !== x_end; x += x_step, i ++ ) {
color = image[ i ];
imageData[ ( x + width * y ) * 4 + 3 ] = 255;
imageData[ ( x + width * y ) * 4 + 2 ] = colormap[ ( color * 3 ) + 0 ];
imageData[ ( x + width * y ) * 4 + 1 ] = colormap[ ( color * 3 ) + 1 ];
imageData[ ( x + width * y ) * 4 + 0 ] = colormap[ ( color * 3 ) + 2 ];
}
}
return imageData;
}
function tgaGetImageData16bits( imageData, y_start, y_step, y_end, x_start, x_step, x_end, image ) {
let color, i = 0, x, y;
const width = header.width;
for ( y = y_start; y !== y_end; y += y_step ) {
for ( x = x_start; x !== x_end; x += x_step, i += 2 ) {
color = image[ i + 0 ] + ( image[ i + 1 ] << 8 );
imageData[ ( x + width * y ) * 4 + 0 ] = ( color & 0x7C00 ) >> 7;
imageData[ ( x + width * y ) * 4 + 1 ] = ( color & 0x03E0 ) >> 2;
imageData[ ( x + width * y ) * 4 + 2 ] = ( color & 0x001F ) << 3;
imageData[ ( x + width * y ) * 4 + 3 ] = ( color & 0x8000 ) ? 0 : 255;
}
}
return imageData;
}
function tgaGetImageData24bits( imageData, y_start, y_step, y_end, x_start, x_step, x_end, image ) {
let i = 0, x, y;
const width = header.width;
for ( y = y_start; y !== y_end; y += y_step ) {
for ( x = x_start; x !== x_end; x += x_step, i += 3 ) {
imageData[ ( x + width * y ) * 4 + 3 ] = 255;
imageData[ ( x + width * y ) * 4 + 2 ] = image[ i + 0 ];
imageData[ ( x + width * y ) * 4 + 1 ] = image[ i + 1 ];
imageData[ ( x + width * y ) * 4 + 0 ] = image[ i + 2 ];
}
}
return imageData;
}
function tgaGetImageData32bits( imageData, y_start, y_step, y_end, x_start, x_step, x_end, image ) {
let i = 0, x, y;
const width = header.width;
for ( y = y_start; y !== y_end; y += y_step ) {
for ( x = x_start; x !== x_end; x += x_step, i += 4 ) {
imageData[ ( x + width * y ) * 4 + 2 ] = image[ i + 0 ];
imageData[ ( x + width * y ) * 4 + 1 ] = image[ i + 1 ];
imageData[ ( x + width * y ) * 4 + 0 ] = image[ i + 2 ];
imageData[ ( x + width * y ) * 4 + 3 ] = image[ i + 3 ];
}
}
return imageData;
}
function tgaGetImageDataGrey8bits( imageData, y_start, y_step, y_end, x_start, x_step, x_end, image ) {
let color, i = 0, x, y;
const width = header.width;
for ( y = y_start; y !== y_end; y += y_step ) {
for ( x = x_start; x !== x_end; x += x_step, i ++ ) {
color = image[ i ];
imageData[ ( x + width * y ) * 4 + 0 ] = color;
imageData[ ( x + width * y ) * 4 + 1 ] = color;
imageData[ ( x + width * y ) * 4 + 2 ] = color;
imageData[ ( x + width * y ) * 4 + 3 ] = 255;
}
}
return imageData;
}
function tgaGetImageDataGrey16bits( imageData, y_start, y_step, y_end, x_start, x_step, x_end, image ) {
let i = 0, x, y;
const width = header.width;
for ( y = y_start; y !== y_end; y += y_step ) {
for ( x = x_start; x !== x_end; x += x_step, i += 2 ) {
imageData[ ( x + width * y ) * 4 + 0 ] = image[ i + 0 ];
imageData[ ( x + width * y ) * 4 + 1 ] = image[ i + 0 ];
imageData[ ( x + width * y ) * 4 + 2 ] = image[ i + 0 ];
imageData[ ( x + width * y ) * 4 + 3 ] = image[ i + 1 ];
}
}
return imageData;
}
function getTgaRGBA( data, width, height, image, palette ) {
let x_start,
y_start,
x_step,
y_step,
x_end,
y_end;
switch ( ( header.flags & TGA_ORIGIN_MASK ) >> TGA_ORIGIN_SHIFT ) {
default:
case TGA_ORIGIN_UL:
x_start = 0;
x_step = 1;
x_end = width;
y_start = 0;
y_step = 1;
y_end = height;
break;
case TGA_ORIGIN_BL:
x_start = 0;
x_step = 1;
x_end = width;
y_start = height - 1;
y_step = - 1;
y_end = - 1;
break;
case TGA_ORIGIN_UR:
x_start = width - 1;
x_step = - 1;
x_end = - 1;
y_start = 0;
y_step = 1;
y_end = height;
break;
case TGA_ORIGIN_BR:
x_start = width - 1;
x_step = - 1;
x_end = - 1;
y_start = height - 1;
y_step = - 1;
y_end = - 1;
break;
}
if ( use_grey ) {
switch ( header.pixel_size ) {
case 8:
tgaGetImageDataGrey8bits( data, y_start, y_step, y_end, x_start, x_step, x_end, image );
break;
case 16:
tgaGetImageDataGrey16bits( data, y_start, y_step, y_end, x_start, x_step, x_end, image );
break;
default:
throw new Error( 'THREE.TGALoader: Format not supported.' );
}
} else {
switch ( header.pixel_size ) {
case 8:
tgaGetImageData8bits( data, y_start, y_step, y_end, x_start, x_step, x_end, image, palette );
break;
case 16:
tgaGetImageData16bits( data, y_start, y_step, y_end, x_start, x_step, x_end, image );
break;
case 24:
tgaGetImageData24bits( data, y_start, y_step, y_end, x_start, x_step, x_end, image );
break;
case 32:
tgaGetImageData32bits( data, y_start, y_step, y_end, x_start, x_step, x_end, image );
break;
default:
throw new Error( 'THREE.TGALoader: Format not supported.' );
}
}
// Load image data according to specific method
// let func = 'tgaGetImageData' + (use_grey ? 'Grey' : '') + (header.pixel_size) + 'bits';
// func(data, y_start, y_step, y_end, x_start, x_step, x_end, width, image, palette );
return data;
}
// TGA constants
const TGA_TYPE_NO_DATA = 0,
TGA_TYPE_INDEXED = 1,
TGA_TYPE_RGB = 2,
TGA_TYPE_GREY = 3,
TGA_TYPE_RLE_INDEXED = 9,
TGA_TYPE_RLE_RGB = 10,
TGA_TYPE_RLE_GREY = 11,
TGA_ORIGIN_MASK = 0x30,
TGA_ORIGIN_SHIFT = 0x04,
TGA_ORIGIN_BL = 0x00,
TGA_ORIGIN_BR = 0x01,
TGA_ORIGIN_UL = 0x02,
TGA_ORIGIN_UR = 0x03;
if ( buffer.length < 19 ) throw new Error( 'THREE.TGALoader: Not enough data to contain header.' );
let offset = 0;
const content = new Uint8Array( buffer ),
header = {
id_length: content[ offset ++ ],
colormap_type: content[ offset ++ ],
image_type: content[ offset ++ ],
colormap_index: content[ offset ++ ] | content[ offset ++ ] << 8,
colormap_length: content[ offset ++ ] | content[ offset ++ ] << 8,
colormap_size: content[ offset ++ ],
origin: [
content[ offset ++ ] | content[ offset ++ ] << 8,
content[ offset ++ ] | content[ offset ++ ] << 8
],
width: content[ offset ++ ] | content[ offset ++ ] << 8,
height: content[ offset ++ ] | content[ offset ++ ] << 8,
pixel_size: content[ offset ++ ],
flags: content[ offset ++ ]
};
// check whether the TGA header describes a valid format
tgaCheckHeader( header );
if ( header.id_length + offset > buffer.length ) {
throw new Error( 'THREE.TGALoader: No data.' );
}
// skip the image ID field ( not needed )
offset += header.id_length;
// get targa information about RLE compression and palette
let use_rle = false,
use_pal = false,
use_grey = false;
switch ( header.image_type ) {
case TGA_TYPE_RLE_INDEXED:
use_rle = true;
use_pal = true;
break;
case TGA_TYPE_INDEXED:
use_pal = true;
break;
case TGA_TYPE_RLE_RGB:
use_rle = true;
break;
case TGA_TYPE_RGB:
break;
case TGA_TYPE_RLE_GREY:
use_rle = true;
use_grey = true;
break;
case TGA_TYPE_GREY:
use_grey = true;
break;
}
//
const imageData = new Uint8Array( header.width * header.height * 4 );
const result = tgaParse( use_rle, use_pal, header, offset, content );
getTgaRGBA( imageData, header.width, header.height, result.pixel_data, result.palettes );
return {
data: imageData,
width: header.width,
height: header.height,
flipY: true,
generateMipmaps: true,
minFilter: LinearMipmapLinearFilter,
};
}
}
export { TGALoader };

59
node_modules/three/examples/jsm/loaders/TIFFLoader.js generated vendored Normal file
View File

@@ -0,0 +1,59 @@
import {
DataTextureLoader,
LinearFilter,
LinearMipmapLinearFilter
} from 'three';
import UTIF from '../libs/utif.module.js';
/**
* A loader for the TIFF texture format.
*
* ```js
* const loader = new TIFFLoader();
* const texture = await loader.loadAsync( 'textures/tiff/crate_lzw.tif' );
* texture.colorSpace = THREE.SRGBColorSpace;
* ```
*
* @augments DataTextureLoader
* @three_import import { TIFFLoader } from 'three/addons/loaders/TIFFLoader.js';
*/
class TIFFLoader extends DataTextureLoader {
/**
* Constructs a new TIFF loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
}
/**
* Parses the given TIFF texture data.
*
* @param {ArrayBuffer} buffer - The raw texture data.
* @return {DataTextureLoader~TexData} An object representing the parsed texture data.
*/
parse( buffer ) {
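// UTIF decodes the file into a list of IFDs ( image file directories ); only the first
// image is decoded here and converted to 8-bit RGBA.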
const ifds = UTIF.decode( buffer );
UTIF.decodeImage( buffer, ifds[ 0 ] );
const rgba = UTIF.toRGBA8( ifds[ 0 ] );
return {
width: ifds[ 0 ].width,
height: ifds[ 0 ].height,
data: rgba,
flipY: true,
magFilter: LinearFilter,
minFilter: LinearMipmapLinearFilter
};
}
}
export { TIFFLoader };

261
node_modules/three/examples/jsm/loaders/TTFLoader.js generated vendored Normal file
View File

@@ -0,0 +1,261 @@
import {
FileLoader,
Loader
} from 'three';
import opentype from '../libs/opentype.module.js';
/**
* A loader for the TTF format.
*
* Loads TTF files and converts them into typeface JSON that can be used directly
* to create THREE.Font objects.
*
* ```js
* const loader = new TTFLoader();
* const json = await loader.loadAsync( 'fonts/ttf/kenpixel.ttf' );
* const font = new Font( json );
* ```
*
* @augments Loader
* @three_import import { TTFLoader } from 'three/addons/loaders/TTFLoader.js';
*/
class TTFLoader extends Loader {
/**
* Constructs a new TTF loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
/**
* Whether the TTF commands should be reversed or not.
*
* @type {boolean}
* @default false
*/
this.reversed = false;
}
/**
* Starts loading from the given URL and passes the loaded TTF asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(Object)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( this.requestHeader );
loader.setWithCredentials( this.withCredentials );
loader.load( url, function ( buffer ) {
try {
onLoad( scope.parse( buffer ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given TTF data and returns a JSON for creating a font.
*
* @param {ArrayBuffer} arraybuffer - The raw TTF data as an array buffer.
* @return {Object} The result JSON.
*/
parse( arraybuffer ) {
function convert( font, reversed ) {
const round = Math.round;
const glyphs = {};
const scale = ( 100000 ) / ( ( font.unitsPerEm || 2048 ) * 72 );
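// scale factor from font units ( per em ) into the coordinate space of the generated
// typeface JSON ( see 'resolution' in the returned object below )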
const glyphIndexMap = font.encoding.cmap.glyphIndexMap;
const unicodes = Object.keys( glyphIndexMap );
for ( let i = 0; i < unicodes.length; i ++ ) {
const unicode = unicodes[ i ];
const glyph = font.glyphs.glyphs[ glyphIndexMap[ unicode ] ];
if ( unicode !== undefined ) {
const token = {
ha: round( glyph.advanceWidth * scale ),
x_min: round( glyph.xMin * scale ),
x_max: round( glyph.xMax * scale ),
o: ''
};
if ( reversed ) {
glyph.path.commands = reverseCommands( glyph.path.commands );
}
glyph.path.commands.forEach( function ( command ) {
if ( command.type.toLowerCase() === 'c' ) {
command.type = 'b';
}
token.o += command.type.toLowerCase() + ' ';
if ( command.x !== undefined && command.y !== undefined ) {
token.o += round( command.x * scale ) + ' ' + round( command.y * scale ) + ' ';
}
if ( command.x1 !== undefined && command.y1 !== undefined ) {
token.o += round( command.x1 * scale ) + ' ' + round( command.y1 * scale ) + ' ';
}
if ( command.x2 !== undefined && command.y2 !== undefined ) {
token.o += round( command.x2 * scale ) + ' ' + round( command.y2 * scale ) + ' ';
}
} );
if ( Array.isArray( glyph.unicodes ) && glyph.unicodes.length > 0 ) {
glyph.unicodes.forEach( function ( unicode ) {
glyphs[ String.fromCodePoint( unicode ) ] = token;
} );
} else {
glyphs[ String.fromCodePoint( glyph.unicode ) ] = token;
}
}
}
return {
glyphs: glyphs,
familyName: font.getEnglishName( 'fullName' ),
ascender: round( font.ascender * scale ),
descender: round( font.descender * scale ),
underlinePosition: font.tables.post.underlinePosition,
underlineThickness: font.tables.post.underlineThickness,
boundingBox: {
xMin: font.tables.head.xMin,
xMax: font.tables.head.xMax,
yMin: font.tables.head.yMin,
yMax: font.tables.head.yMax
},
resolution: 1000,
original_font_information: font.tables.name
};
}
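// Splits the command list into contours at each moveto command, then rebuilds every
// contour in reverse order ( swapping curve control points ) so that the winding
// direction of the outline is flipped.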
function reverseCommands( commands ) {
const paths = [];
let path;
commands.forEach( function ( c ) {
if ( c.type.toLowerCase() === 'm' ) {
path = [ c ];
paths.push( path );
} else if ( c.type.toLowerCase() !== 'z' ) {
path.push( c );
}
} );
const reversed = [];
paths.forEach( function ( p ) {
const result = {
type: 'm',
x: p[ p.length - 1 ].x,
y: p[ p.length - 1 ].y
};
reversed.push( result );
for ( let i = p.length - 1; i > 0; i -- ) {
const command = p[ i ];
const result = { type: command.type };
if ( command.x2 !== undefined && command.y2 !== undefined ) {
result.x1 = command.x2;
result.y1 = command.y2;
result.x2 = command.x1;
result.y2 = command.y1;
} else if ( command.x1 !== undefined && command.y1 !== undefined ) {
result.x1 = command.x1;
result.y1 = command.y1;
}
result.x = p[ i - 1 ].x;
result.y = p[ i - 1 ].y;
reversed.push( result );
}
} );
return reversed;
}
return convert( opentype.parse( arraybuffer ), this.reversed );
}
}
export { TTFLoader };

219
node_modules/three/examples/jsm/loaders/USDLoader.js generated vendored Normal file
View File

@@ -0,0 +1,219 @@
import {
FileLoader,
Loader
} from 'three';
import * as fflate from '../libs/fflate.module.js';
import { USDAParser } from './usd/USDAParser.js';
import { USDCParser } from './usd/USDCParser.js';
/**
* A loader for the USDZ format.
*
* USDZ files that use USDC internally are not yet supported, only USDA.
*
* ```js
* const loader = new USDZLoader();
* const model = await loader.loadAsync( 'saeukkang.usdz' );
* scene.add( model );
* ```
*
* @augments Loader
* @three_import import { USDLoader } from 'three/addons/loaders/USDLoader.js';
*/
class USDLoader extends Loader {
/**
* Constructs a new USDZ loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
}
/**
* Starts loading from the given URL and passes the loaded USDZ asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(Group)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given USDZ data and returns the resulting group.
*
* @param {ArrayBuffer|string} buffer - The raw USDZ data as an array buffer.
* @return {Group} The parsed asset as a group.
*/
parse( buffer ) {
const usda = new USDAParser();
const usdc = new USDCParser();
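// The input can be a plain USDA string, a binary USDC ( crate ) buffer or a zipped USDZ
// archive; the checks at the end of parse() dispatch on the input type.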
function parseAssets( zip ) {
const data = {};
const loader = new FileLoader();
loader.setResponseType( 'arraybuffer' );
for ( const filename in zip ) {
if ( filename.endsWith( 'png' ) ) {
const blob = new Blob( [ zip[ filename ] ], { type: 'image/png' } );
data[ filename ] = URL.createObjectURL( blob );
}
if ( filename.endsWith( 'usd' ) || filename.endsWith( 'usda' ) || filename.endsWith( 'usdc' ) ) {
if ( isCrateFile( zip[ filename ] ) ) {
data[ filename ] = usdc.parse( zip[ filename ].buffer, data );
} else {
const text = fflate.strFromU8( zip[ filename ] );
data[ filename ] = usda.parseText( text );
}
}
}
return data;
}
function isCrateFile( buffer ) {
const crateHeader = new Uint8Array( [ 0x50, 0x58, 0x52, 0x2D, 0x55, 0x53, 0x44, 0x43 ] ); // PXR-USDC
if ( buffer.byteLength < crateHeader.length ) return false;
const view = new Uint8Array( buffer, 0, crateHeader.length );
for ( let i = 0; i < crateHeader.length; i ++ ) {
if ( view[ i ] !== crateHeader[ i ] ) return false;
}
return true;
}
function findUSD( zip ) {
if ( zip.length < 1 ) return undefined;
const firstFileName = Object.keys( zip )[ 0 ];
let isCrate = false;
// As per the USD specification, the first entry in the zip archive is used as the main file ("UsdStage").
// ASCII files can end in either .usda or .usd.
// See https://openusd.org/release/spec_usdz.html#layout
if ( firstFileName.endsWith( 'usda' ) ) return zip[ firstFileName ];
if ( firstFileName.endsWith( 'usdc' ) ) {
isCrate = true;
} else if ( firstFileName.endsWith( 'usd' ) ) {
// If this is not a crate file, we assume it is a plain USDA file.
if ( ! isCrateFile( zip[ firstFileName ] ) ) {
return zip[ firstFileName ];
} else {
isCrate = true;
}
}
if ( isCrate ) {
return zip[ firstFileName ];
}
}
// USDA
if ( typeof buffer === 'string' ) {
return usda.parse( buffer, {} );
}
// USDC
if ( isCrateFile( buffer ) ) {
return usdc.parse( buffer );
}
// USDZ
const zip = fflate.unzipSync( new Uint8Array( buffer ) );
const assets = parseAssets( zip );
// console.log( assets );
const file = findUSD( zip );
const text = fflate.strFromU8( file );
return usda.parse( text, assets );
}
}
export { USDLoader };

16
node_modules/three/examples/jsm/loaders/USDZLoader.js generated vendored Normal file
View File

@@ -0,0 +1,16 @@
import { USDLoader } from './USDLoader.js';
// @deprecated, r179
class USDZLoader extends USDLoader {
constructor( manager ) {
console.warn( 'USDZLoader has been deprecated. Please use USDLoader instead.' );
super( manager );
}
}
export { USDZLoader };

630
node_modules/three/examples/jsm/loaders/UltraHDRLoader.js generated vendored Normal file
View File

@@ -0,0 +1,630 @@
import {
ClampToEdgeWrapping,
DataTexture,
DataUtils,
FileLoader,
HalfFloatType,
LinearFilter,
LinearMipMapLinearFilter,
LinearSRGBColorSpace,
Loader,
RGBAFormat,
UVMapping,
} from 'three';
/**
* UltraHDR Image Format - https://developer.android.com/media/platform/hdr-image-format
*
* Short format brief:
*
* [JPEG headers]
* [XMP metadata describing the MPF container and *both* SDR and gainmap images]
* [Optional metadata] [EXIF] [ICC Profile]
* [SDR image]
* [XMP metadata describing only the gainmap image]
* [Gainmap image]
*
 * Each section starts with a 0xFF marker byte followed by a descriptor byte (0xFFE0, 0xFFE1, 0xFFE2).
 * Binary image data is prefixed with the unique 0xFFD8 (SOI) 16-bit marker.
*/
// Calculating these sRGB powers is extremely slow for 4K images, so they are precalculated here for a 3-4x speed boost
const SRGB_TO_LINEAR = Array( 1024 )
.fill( 0 )
.map( ( _, value ) =>
Math.pow( ( value / 255 ) * 0.9478672986 + 0.0521327014, 2.4 )
);
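// The table covers encoded values 0-1023; _srgbToLinear() falls back to the exact
// Math.pow() computation for recovered HDR values outside that range.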
/**
* A loader for the Ultra HDR Image Format.
*
* Existing HDR or EXR textures can be converted to Ultra HDR with this [tool]{@link https://gainmap-creator.monogrid.com/}.
*
* Current feature set:
* - JPEG headers (required)
* - XMP metadata (required)
* - XMP validation (not implemented)
* - EXIF profile (not implemented)
* - ICC profile (not implemented)
* - Binary storage for SDR & HDR images (required)
* - Gainmap metadata (required)
* - Non-JPEG image formats (not implemented)
* - Primary image as an HDR image (not implemented)
*
* ```js
* const loader = new UltraHDRLoader();
* const texture = await loader.loadAsync( 'textures/equirectangular/ice_planet_close.jpg' );
* texture.mapping = THREE.EquirectangularReflectionMapping;
*
* scene.background = texture;
* scene.environment = texture;
* ```
*
* @augments Loader
* @three_import import { UltraHDRLoader } from 'three/addons/loaders/UltraHDRLoader.js';
*/
class UltraHDRLoader extends Loader {
/**
* Constructs a new Ultra HDR loader.
*
* @param {LoadingManager} [manager] - The loading manager.
*/
constructor( manager ) {
super( manager );
/**
* The texture type.
*
* @type {(HalfFloatType|FloatType)}
* @default HalfFloatType
*/
this.type = HalfFloatType;
}
/**
* Sets the texture type.
*
* @param {(HalfFloatType|FloatType)} value - The texture type to set.
* @return {UltraHDRLoader} A reference to this loader.
*/
setDataType( value ) {
this.type = value;
return this;
}
/**
* Parses the given Ultra HDR texture data.
*
* @param {ArrayBuffer} buffer - The raw texture data.
* @param {Function} onLoad - The `onLoad` callback.
*/
parse( buffer, onLoad ) {
const xmpMetadata = {
version: null,
baseRenditionIsHDR: null,
gainMapMin: null,
gainMapMax: null,
gamma: null,
offsetSDR: null,
offsetHDR: null,
hdrCapacityMin: null,
hdrCapacityMax: null,
};
const textDecoder = new TextDecoder();
const data = new DataView( buffer );
let byteOffset = 0;
const sections = [];
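/* Scan the JPEG byte stream and split it into sections at every 0xFF marker that is
followed by a known descriptor byte ( SOI / APP0 / APP1 / APP2 ); all other bytes are
appended to the section currently being read. */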
while ( byteOffset < data.byteLength ) {
const byte = data.getUint8( byteOffset );
if ( byte === 0xff ) {
const leadingByte = data.getUint8( byteOffset + 1 );
if (
[
/* Valid section headers */
0xd8, // SOI
0xe0, // APP0
0xe1, // APP1
0xe2, // APP2
].includes( leadingByte )
) {
sections.push( {
sectionType: leadingByte,
section: [ byte, leadingByte ],
sectionOffset: byteOffset + 2,
} );
byteOffset += 2;
} else {
sections[ sections.length - 1 ].section.push( byte, leadingByte );
byteOffset += 2;
}
} else {
sections[ sections.length - 1 ].section.push( byte );
byteOffset ++;
}
}
let primaryImage, gainmapImage;
for ( let i = 0; i < sections.length; i ++ ) {
const { sectionType, section, sectionOffset } = sections[ i ];
if ( sectionType === 0xe0 ) {
/* JPEG Header - no useful information */
} else if ( sectionType === 0xe1 ) {
/* XMP Metadata */
this._parseXMPMetadata(
textDecoder.decode( new Uint8Array( section ) ),
xmpMetadata
);
} else if ( sectionType === 0xe2 ) {
/* Data Sections - MPF / EXIF / ICC Profile */
const sectionData = new DataView(
new Uint8Array( section.slice( 2 ) ).buffer
);
const sectionHeader = sectionData.getUint32( 2, false );
if ( sectionHeader === 0x4d504600 ) {
/* MPF Section */
/* Section contains a list of static bytes and ends with offsets indicating location of SDR and gainmap images */
/* First bytes after header indicate little / big endian ordering (0x49492A00 - LE / 0x4D4D002A - BE) */
/*
... 60 bytes indicating tags, versions, etc. ...
bytes | bits | description
4 32 primary image size
4 32 primary image offset
2 16 0x0000
2 16 0x0000
4 32 0x00000000
4 32 gainmap image size
4 32 gainmap image offset
2 16 0x0000
2 16 0x0000
*/
const mpfLittleEndian = sectionData.getUint32( 6 ) === 0x49492a00;
const mpfBytesOffset = 60;
/* SDR size includes the metadata length, SDR offset is always 0 */
const primaryImageSize = sectionData.getUint32(
mpfBytesOffset,
mpfLittleEndian
);
const primaryImageOffset = sectionData.getUint32(
mpfBytesOffset + 4,
mpfLittleEndian
);
/* Gainmap size is an absolute value starting from its offset; the gainmap offset needs 6 bytes of padding to account for the 0x00 bytes at the end of the XMP */
const gainmapImageSize = sectionData.getUint32(
mpfBytesOffset + 16,
mpfLittleEndian
);
const gainmapImageOffset =
sectionData.getUint32( mpfBytesOffset + 20, mpfLittleEndian ) +
sectionOffset +
6;
primaryImage = new Uint8Array(
data.buffer,
primaryImageOffset,
primaryImageSize
);
gainmapImage = new Uint8Array(
data.buffer,
gainmapImageOffset,
gainmapImageSize
);
}
}
}
/* Minimal sufficient validation - https://developer.android.com/media/platform/hdr-image-format#signal_of_the_format */
if ( ! xmpMetadata.version ) {
throw new Error( 'THREE.UltraHDRLoader: Not a valid UltraHDR image' );
}
if ( primaryImage && gainmapImage ) {
this._applyGainmapToSDR(
xmpMetadata,
primaryImage,
gainmapImage,
( hdrBuffer, width, height ) => {
onLoad( {
width,
height,
data: hdrBuffer,
format: RGBAFormat,
type: this.type,
} );
},
( error ) => {
throw new Error( error );
}
);
} else {
throw new Error( 'THREE.UltraHDRLoader: Could not parse UltraHDR images' );
}
}
/**
* Starts loading from the given URL and passes the loaded Ultra HDR texture
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the files to be loaded. This can also be a data URI.
* @param {function(DataTexture, Object)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
* @return {DataTexture} The Ultra HDR texture.
*/
load( url, onLoad, onProgress, onError ) {
const texture = new DataTexture(
this.type === HalfFloatType ? new Uint16Array() : new Float32Array(),
0,
0,
RGBAFormat,
this.type,
UVMapping,
ClampToEdgeWrapping,
ClampToEdgeWrapping,
LinearFilter,
LinearMipMapLinearFilter,
1,
LinearSRGBColorSpace
);
texture.generateMipmaps = true;
texture.flipY = true;
const loader = new FileLoader( this.manager );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( this.requestHeader );
loader.setPath( this.path );
loader.setWithCredentials( this.withCredentials );
loader.load( url, ( buffer ) => {
try {
this.parse(
buffer,
( texData ) => {
texture.image = {
data: texData.data,
width: texData.width,
height: texData.height,
};
texture.needsUpdate = true;
if ( onLoad ) onLoad( texture, texData );
}
);
} catch ( error ) {
if ( onError ) onError( error );
console.error( error );
}
}, onProgress, onError );
return texture;
}
_parseXMPMetadata( xmpDataString, xmpMetadata ) {
const domParser = new DOMParser();
const xmpXml = domParser.parseFromString(
xmpDataString.substring(
xmpDataString.indexOf( '<' ),
xmpDataString.lastIndexOf( '>' ) + 1
),
'text/xml'
);
/* Determine if given XMP metadata is the primary GContainer descriptor or a gainmap descriptor */
const [ hasHDRContainerDescriptor ] = xmpXml.getElementsByTagName(
'Container:Directory'
);
if ( hasHDRContainerDescriptor ) {
/* There's not much useful information in the container descriptor besides memory-validation */
} else {
/* Gainmap descriptor - defaults from https://developer.android.com/media/platform/hdr-image-format#HDR_gain_map_metadata */
const [ gainmapNode ] = xmpXml.getElementsByTagName( 'rdf:Description' );
xmpMetadata.version = gainmapNode.getAttribute( 'hdrgm:Version' );
xmpMetadata.baseRenditionIsHDR =
gainmapNode.getAttribute( 'hdrgm:BaseRenditionIsHDR' ) === 'True';
xmpMetadata.gainMapMin = parseFloat(
gainmapNode.getAttribute( 'hdrgm:GainMapMin' ) || 0.0
);
xmpMetadata.gainMapMax = parseFloat(
gainmapNode.getAttribute( 'hdrgm:GainMapMax' ) || 1.0
);
xmpMetadata.gamma = parseFloat(
gainmapNode.getAttribute( 'hdrgm:Gamma' ) || 1.0
);
xmpMetadata.offsetSDR = parseFloat(
gainmapNode.getAttribute( 'hdrgm:OffsetSDR' ) / ( 1 / 64 )
);
xmpMetadata.offsetHDR = parseFloat(
gainmapNode.getAttribute( 'hdrgm:OffsetHDR' ) / ( 1 / 64 )
);
xmpMetadata.hdrCapacityMin = parseFloat(
gainmapNode.getAttribute( 'hdrgm:HDRCapacityMin' ) || 0.0
);
xmpMetadata.hdrCapacityMax = parseFloat(
gainmapNode.getAttribute( 'hdrgm:HDRCapacityMax' ) || 1.0
);
}
}
_srgbToLinear( value ) {
if ( value / 255 < 0.04045 ) {
return ( value / 255 ) * 0.0773993808;
}
if ( value < 1024 ) {
return SRGB_TO_LINEAR[ ~ ~ value ];
}
return Math.pow( ( value / 255 ) * 0.9478672986 + 0.0521327014, 2.4 );
}
_applyGainmapToSDR(
xmpMetadata,
sdrBuffer,
gainmapBuffer,
onSuccess,
onError
) {
const getImageDataFromBuffer = ( buffer ) =>
new Promise( ( resolve, reject ) => {
const imageLoader = document.createElement( 'img' );
imageLoader.onload = () => {
const image = {
width: imageLoader.naturalWidth,
height: imageLoader.naturalHeight,
source: imageLoader,
};
URL.revokeObjectURL( imageLoader.src );
resolve( image );
};
imageLoader.onerror = () => {
URL.revokeObjectURL( imageLoader.src );
reject();
};
imageLoader.src = URL.createObjectURL(
new Blob( [ buffer ], { type: 'image/jpeg' } )
);
} );
Promise.all( [
getImageDataFromBuffer( sdrBuffer ),
getImageDataFromBuffer( gainmapBuffer ),
] )
.then( ( [ sdrImage, gainmapImage ] ) => {
const sdrImageAspect = sdrImage.width / sdrImage.height;
const gainmapImageAspect = gainmapImage.width / gainmapImage.height;
if ( sdrImageAspect !== gainmapImageAspect ) {
onError(
'THREE.UltraHDRLoader Error: Aspect ratio mismatch between SDR and Gainmap images'
);
return;
}
const canvas = document.createElement( 'canvas' );
const ctx = canvas.getContext( '2d', {
willReadFrequently: true,
colorSpace: 'srgb',
} );
canvas.width = sdrImage.width;
canvas.height = sdrImage.height;
/* Use out-of-the-box interpolation of Canvas API to scale gainmap to fit the SDR resolution */
ctx.drawImage(
gainmapImage.source,
0,
0,
gainmapImage.width,
gainmapImage.height,
0,
0,
sdrImage.width,
sdrImage.height
);
const gainmapImageData = ctx.getImageData(
0,
0,
sdrImage.width,
sdrImage.height,
{ colorSpace: 'srgb' }
);
ctx.drawImage( sdrImage.source, 0, 0 );
const sdrImageData = ctx.getImageData(
0,
0,
sdrImage.width,
sdrImage.height,
{ colorSpace: 'srgb' }
);
/* HDR Recovery formula - https://developer.android.com/media/platform/hdr-image-format#use_the_gain_map_to_create_adapted_HDR_rendition */
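/*
As implemented below, per color channel:
recovery = gainmapValue ^ ( 1 / gamma )
logBoost = gainMapMin * ( 1 - recovery ) + gainMapMax * recovery
weight = clamp( ( log2( maxDisplayBoost ) - hdrCapacityMin ) / ( hdrCapacityMax - hdrCapacityMin ), 0, 1 )
hdr = ( sdr + offsetSDR ) * 2 ^ ( logBoost * weight ) - offsetHDR
The result is converted from sRGB to linear and clamped before being stored as half floats ( or floats ).
*/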
let hdrBuffer;
if ( this.type === HalfFloatType ) {
hdrBuffer = new Uint16Array( sdrImageData.data.length ).fill( 23544 );
} else {
hdrBuffer = new Float32Array( sdrImageData.data.length ).fill( 255 );
}
const maxDisplayBoost = Math.sqrt(
Math.pow(
/* 1.8 instead of 2 near-perfectly rectifies approximations introduced by precalculated SRGB_TO_LINEAR values */
1.8,
xmpMetadata.hdrCapacityMax
)
);
const unclampedWeightFactor =
( Math.log2( maxDisplayBoost ) - xmpMetadata.hdrCapacityMin ) /
( xmpMetadata.hdrCapacityMax - xmpMetadata.hdrCapacityMin );
const weightFactor = Math.min(
Math.max( unclampedWeightFactor, 0.0 ),
1.0
);
const useGammaOne = xmpMetadata.gamma === 1.0;
for (
let pixelIndex = 0;
pixelIndex < sdrImageData.data.length;
pixelIndex += 4
) {
const x = ( pixelIndex / 4 ) % sdrImage.width;
const y = Math.floor( pixelIndex / 4 / sdrImage.width );
for ( let channelIndex = 0; channelIndex < 3; channelIndex ++ ) {
const sdrValue = sdrImageData.data[ pixelIndex + channelIndex ];
const gainmapIndex = ( y * sdrImage.width + x ) * 4 + channelIndex;
const gainmapValue = gainmapImageData.data[ gainmapIndex ] / 255.0;
/* Gamma is 1.0 by default */
const logRecovery = useGammaOne
? gainmapValue
: Math.pow( gainmapValue, 1.0 / xmpMetadata.gamma );
const logBoost =
xmpMetadata.gainMapMin * ( 1.0 - logRecovery ) +
xmpMetadata.gainMapMax * logRecovery;
const hdrValue =
( sdrValue + xmpMetadata.offsetSDR ) *
( logBoost * weightFactor === 0.0
? 1.0
: Math.pow( 2, logBoost * weightFactor ) ) -
xmpMetadata.offsetHDR;
const linearHDRValue = Math.min(
Math.max( this._srgbToLinear( hdrValue ), 0 ),
65504
);
hdrBuffer[ pixelIndex + channelIndex ] =
this.type === HalfFloatType
? DataUtils.toHalfFloat( linearHDRValue )
: linearHDRValue;
}
}
onSuccess( hdrBuffer, sdrImage.width, sdrImage.height );
} )
.catch( () => {
throw new Error(
'THREE.UltraHDRLoader Error: Could not parse UltraHDR images'
);
} );
}
}
export { UltraHDRLoader };

376
node_modules/three/examples/jsm/loaders/VOXLoader.js generated vendored Normal file
View File

@@ -0,0 +1,376 @@
import {
BufferGeometry,
Color,
Data3DTexture,
FileLoader,
Float32BufferAttribute,
Loader,
LinearFilter,
Mesh,
MeshStandardMaterial,
NearestFilter,
RedFormat,
SRGBColorSpace
} from 'three';
/**
* A loader for the VOX format.
*
* ```js
* const loader = new VOXLoader();
* const chunks = await loader.loadAsync( 'models/vox/monu10.vox' );
*
* for ( let i = 0; i < chunks.length; i ++ ) {
*
* const chunk = chunks[ i ];
* const mesh = new VOXMesh( chunk );
* mesh.scale.setScalar( 0.0015 );
* scene.add( mesh );
*
* }
* ```
* @augments Loader
* @three_import import { VOXLoader } from 'three/addons/loaders/VOXLoader.js';
*/
class VOXLoader extends Loader {
/**
* Starts loading from the given URL and passes the loaded VOX asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(Array<Object>)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( scope.requestHeader );
loader.load( url, function ( buffer ) {
try {
onLoad( scope.parse( buffer ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given VOX data and returns the resulting chunks.
*
* @param {ArrayBuffer} buffer - The raw VOX data as an array buffer.
* @return {Array<Object>} The parsed chunks.
*/
parse( buffer ) {
const data = new DataView( buffer );
const id = data.getUint32( 0, true );
const version = data.getUint32( 4, true );
if ( id !== 542658390 ) {
console.error( 'THREE.VOXLoader: Invalid VOX file.' );
return;
}
if ( version !== 150 ) {
console.error( 'THREE.VOXLoader: Invalid VOX file. Unsupported version:', version );
return;
}
const DEFAULT_PALETTE = [
0x00000000, 0xffffffff, 0xffccffff, 0xff99ffff, 0xff66ffff, 0xff33ffff, 0xff00ffff, 0xffffccff,
0xffccccff, 0xff99ccff, 0xff66ccff, 0xff33ccff, 0xff00ccff, 0xffff99ff, 0xffcc99ff, 0xff9999ff,
0xff6699ff, 0xff3399ff, 0xff0099ff, 0xffff66ff, 0xffcc66ff, 0xff9966ff, 0xff6666ff, 0xff3366ff,
0xff0066ff, 0xffff33ff, 0xffcc33ff, 0xff9933ff, 0xff6633ff, 0xff3333ff, 0xff0033ff, 0xffff00ff,
0xffcc00ff, 0xff9900ff, 0xff6600ff, 0xff3300ff, 0xff0000ff, 0xffffffcc, 0xffccffcc, 0xff99ffcc,
0xff66ffcc, 0xff33ffcc, 0xff00ffcc, 0xffffcccc, 0xffcccccc, 0xff99cccc, 0xff66cccc, 0xff33cccc,
0xff00cccc, 0xffff99cc, 0xffcc99cc, 0xff9999cc, 0xff6699cc, 0xff3399cc, 0xff0099cc, 0xffff66cc,
0xffcc66cc, 0xff9966cc, 0xff6666cc, 0xff3366cc, 0xff0066cc, 0xffff33cc, 0xffcc33cc, 0xff9933cc,
0xff6633cc, 0xff3333cc, 0xff0033cc, 0xffff00cc, 0xffcc00cc, 0xff9900cc, 0xff6600cc, 0xff3300cc,
0xff0000cc, 0xffffff99, 0xffccff99, 0xff99ff99, 0xff66ff99, 0xff33ff99, 0xff00ff99, 0xffffcc99,
0xffcccc99, 0xff99cc99, 0xff66cc99, 0xff33cc99, 0xff00cc99, 0xffff9999, 0xffcc9999, 0xff999999,
0xff669999, 0xff339999, 0xff009999, 0xffff6699, 0xffcc6699, 0xff996699, 0xff666699, 0xff336699,
0xff006699, 0xffff3399, 0xffcc3399, 0xff993399, 0xff663399, 0xff333399, 0xff003399, 0xffff0099,
0xffcc0099, 0xff990099, 0xff660099, 0xff330099, 0xff000099, 0xffffff66, 0xffccff66, 0xff99ff66,
0xff66ff66, 0xff33ff66, 0xff00ff66, 0xffffcc66, 0xffcccc66, 0xff99cc66, 0xff66cc66, 0xff33cc66,
0xff00cc66, 0xffff9966, 0xffcc9966, 0xff999966, 0xff669966, 0xff339966, 0xff009966, 0xffff6666,
0xffcc6666, 0xff996666, 0xff666666, 0xff336666, 0xff006666, 0xffff3366, 0xffcc3366, 0xff993366,
0xff663366, 0xff333366, 0xff003366, 0xffff0066, 0xffcc0066, 0xff990066, 0xff660066, 0xff330066,
0xff000066, 0xffffff33, 0xffccff33, 0xff99ff33, 0xff66ff33, 0xff33ff33, 0xff00ff33, 0xffffcc33,
0xffcccc33, 0xff99cc33, 0xff66cc33, 0xff33cc33, 0xff00cc33, 0xffff9933, 0xffcc9933, 0xff999933,
0xff669933, 0xff339933, 0xff009933, 0xffff6633, 0xffcc6633, 0xff996633, 0xff666633, 0xff336633,
0xff006633, 0xffff3333, 0xffcc3333, 0xff993333, 0xff663333, 0xff333333, 0xff003333, 0xffff0033,
0xffcc0033, 0xff990033, 0xff660033, 0xff330033, 0xff000033, 0xffffff00, 0xffccff00, 0xff99ff00,
0xff66ff00, 0xff33ff00, 0xff00ff00, 0xffffcc00, 0xffcccc00, 0xff99cc00, 0xff66cc00, 0xff33cc00,
0xff00cc00, 0xffff9900, 0xffcc9900, 0xff999900, 0xff669900, 0xff339900, 0xff009900, 0xffff6600,
0xffcc6600, 0xff996600, 0xff666600, 0xff336600, 0xff006600, 0xffff3300, 0xffcc3300, 0xff993300,
0xff663300, 0xff333300, 0xff003300, 0xffff0000, 0xffcc0000, 0xff990000, 0xff660000, 0xff330000,
0xff0000ee, 0xff0000dd, 0xff0000bb, 0xff0000aa, 0xff000088, 0xff000077, 0xff000055, 0xff000044,
0xff000022, 0xff000011, 0xff00ee00, 0xff00dd00, 0xff00bb00, 0xff00aa00, 0xff008800, 0xff007700,
0xff005500, 0xff004400, 0xff002200, 0xff001100, 0xffee0000, 0xffdd0000, 0xffbb0000, 0xffaa0000,
0xff880000, 0xff770000, 0xff550000, 0xff440000, 0xff220000, 0xff110000, 0xffeeeeee, 0xffdddddd,
0xffbbbbbb, 0xffaaaaaa, 0xff888888, 0xff777777, 0xff555555, 0xff444444, 0xff222222, 0xff111111
];
let i = 8;
let chunk;
const chunks = [];
while ( i < data.byteLength ) {
let id = '';
for ( let j = 0; j < 4; j ++ ) {
id += String.fromCharCode( data.getUint8( i ++ ) );
}
const chunkSize = data.getUint32( i, true ); i += 4;
i += 4; // childChunks
if ( id === 'SIZE' ) {
const x = data.getUint32( i, true ); i += 4;
const y = data.getUint32( i, true ); i += 4;
const z = data.getUint32( i, true ); i += 4;
chunk = {
palette: DEFAULT_PALETTE,
size: { x: x, y: y, z: z },
};
chunks.push( chunk );
i += chunkSize - ( 3 * 4 );
} else if ( id === 'XYZI' ) {
const numVoxels = data.getUint32( i, true ); i += 4;
chunk.data = new Uint8Array( buffer, i, numVoxels * 4 );
i += numVoxels * 4;
} else if ( id === 'RGBA' ) {
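// 256 palette entries are stored as R, G, B, A bytes; read little-endian each entry
// becomes 0xAABBGGRR. Index 0 stays unused because voxel color indices are 1-based.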
const palette = [ 0 ];
for ( let j = 0; j < 256; j ++ ) {
palette[ j + 1 ] = data.getUint32( i, true ); i += 4;
}
chunk.palette = palette;
} else {
// console.log( id, chunkSize, childChunks );
i += chunkSize;
}
}
return chunks;
}
}
/**
* A VOX mesh.
*
* Instances of this class are created from the loaded chunks of {@link VOXLoader}.
*
* @augments Mesh
*/
class VOXMesh extends Mesh {
/**
* Constructs a new VOX mesh.
*
* @param {Object} chunk - A VOX chunk loaded via {@link VOXLoader}.
*/
constructor( chunk ) {
const data = chunk.data;
const size = chunk.size;
const palette = chunk.palette;
//
const vertices = [];
const colors = [];
const nx = [ 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1 ];
const px = [ 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0 ];
const py = [ 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1 ];
const ny = [ 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0 ];
const nz = [ 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0 ];
const pz = [ 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1 ];
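// Each of the six arrays above holds 18 values: six vertices ( two triangles ) forming
// one face of a unit cube, for the negative/positive x, y and z directions.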
const _color = new Color();
function add( tile, x, y, z, r, g, b ) {
x -= size.x / 2;
y -= size.z / 2;
z += size.y / 2;
for ( let i = 0; i < 18; i += 3 ) {
_color.setRGB( r, g, b, SRGBColorSpace );
vertices.push( tile[ i + 0 ] + x, tile[ i + 1 ] + y, tile[ i + 2 ] + z );
colors.push( _color.r, _color.g, _color.b );
}
}
// Store data in a volume for sampling
const offsety = size.x;
const offsetz = size.x * size.y;
const array = new Uint8Array( size.x * size.y * size.z );
for ( let j = 0; j < data.length; j += 4 ) {
const x = data[ j + 0 ];
const y = data[ j + 1 ];
const z = data[ j + 2 ];
const index = x + ( y * offsety ) + ( z * offsetz );
array[ index ] = 255;
}
// Construct geometry
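// A face is emitted only when the neighbouring voxel in that direction is empty ( 0 in
// the volume array ) or the voxel lies on the boundary of the volume, so interior faces are culled.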
let hasColors = false;
for ( let j = 0; j < data.length; j += 4 ) {
const x = data[ j + 0 ];
const y = data[ j + 1 ];
const z = data[ j + 2 ];
const c = data[ j + 3 ];
const hex = palette[ c ];
const r = ( hex >> 0 & 0xff ) / 0xff;
const g = ( hex >> 8 & 0xff ) / 0xff;
const b = ( hex >> 16 & 0xff ) / 0xff;
if ( r > 0 || g > 0 || b > 0 ) hasColors = true;
const index = x + ( y * offsety ) + ( z * offsetz );
if ( array[ index + 1 ] === 0 || x === size.x - 1 ) add( px, x, z, - y, r, g, b );
if ( array[ index - 1 ] === 0 || x === 0 ) add( nx, x, z, - y, r, g, b );
if ( array[ index + offsety ] === 0 || y === size.y - 1 ) add( ny, x, z, - y, r, g, b );
if ( array[ index - offsety ] === 0 || y === 0 ) add( py, x, z, - y, r, g, b );
if ( array[ index + offsetz ] === 0 || z === size.z - 1 ) add( pz, x, z, - y, r, g, b );
if ( array[ index - offsetz ] === 0 || z === 0 ) add( nz, x, z, - y, r, g, b );
}
const geometry = new BufferGeometry();
geometry.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) );
geometry.computeVertexNormals();
const material = new MeshStandardMaterial();
if ( hasColors ) {
geometry.setAttribute( 'color', new Float32BufferAttribute( colors, 3 ) );
material.vertexColors = true;
}
super( geometry, material );
}
}
/**
* A VOX 3D texture.
*
* Instances of this class are created from the loaded chunks of {@link VOXLoader}.
*
* @augments Data3DTexture
*/
class VOXData3DTexture extends Data3DTexture {
/**
* Constructs a new VOX 3D texture.
*
* @param {Object} chunk - A VOX chunk loaded via {@link VOXLoader}.
*/
constructor( chunk ) {
const data = chunk.data;
const size = chunk.size;
const offsety = size.x;
const offsetz = size.x * size.y;
const array = new Uint8Array( size.x * size.y * size.z );
for ( let j = 0; j < data.length; j += 4 ) {
const x = data[ j + 0 ];
const y = data[ j + 1 ];
const z = data[ j + 2 ];
const index = x + ( y * offsety ) + ( z * offsetz );
array[ index ] = 255;
}
super( array, size.x, size.y, size.z );
this.format = RedFormat;
this.minFilter = NearestFilter;
this.magFilter = LinearFilter;
this.unpackAlignment = 1;
this.needsUpdate = true;
}
}
export { VOXLoader, VOXMesh, VOXData3DTexture };

3569
node_modules/three/examples/jsm/loaders/VRMLLoader.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

1276
node_modules/three/examples/jsm/loaders/VTKLoader.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

143
node_modules/three/examples/jsm/loaders/XYZLoader.js generated vendored Normal file
View File

@@ -0,0 +1,143 @@
import {
BufferGeometry,
Color,
FileLoader,
Float32BufferAttribute,
Loader,
SRGBColorSpace
} from 'three';
/**
* A loader for the XYZ format.
*
* XYZ is a very simple format for storing point clouds. The layouts
* `XYZ` (points) and `XYZRGB` (points + colors) are supported.
*
* ```js
* const loader = new XYZLoader();
* const geometry = await loader.loadAsync( 'models/xyz/helix_201.xyz' );
* geometry.center();
*
* const vertexColors = ( geometry.hasAttribute( 'color' ) === true );
* const material = new THREE.PointsMaterial( { size: 0.1, vertexColors: vertexColors } );
*
* const points = new THREE.Points( geometry, material );
* scene.add( points );
* ```
*
* @augments Loader
* @three_import import { XYZLoader } from 'three/addons/loaders/XYZLoader.js';
*/
class XYZLoader extends Loader {
/**
* Starts loading from the given URL and passes the loaded XYZ asset
* to the `onLoad()` callback.
*
* @param {string} url - The path/URL of the file to be loaded. This can also be a data URI.
* @param {function(BufferGeometry)} onLoad - Executed when the loading process has been finished.
* @param {onProgressCallback} onProgress - Executed while the loading is in progress.
* @param {onErrorCallback} onError - Executed when errors occur.
*/
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setRequestHeader( this.requestHeader );
loader.setWithCredentials( this.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
* Parses the given XYZ data and returns the resulting geometry.
*
* @param {string} text - The raw XYZ data as a string.
* @return {BufferGeometry} The geometry representing the point cloud.
*/
parse( text ) {
const lines = text.split( '\n' );
const vertices = [];
const colors = [];
const color = new Color();
for ( let line of lines ) {
line = line.trim();
if ( line.charAt( 0 ) === '#' ) continue; // skip comments
const lineValues = line.split( /\s+/ );
if ( lineValues.length === 3 ) {
// XYZ
vertices.push( parseFloat( lineValues[ 0 ] ) );
vertices.push( parseFloat( lineValues[ 1 ] ) );
vertices.push( parseFloat( lineValues[ 2 ] ) );
}
if ( lineValues.length === 6 ) {
// XYZRGB
vertices.push( parseFloat( lineValues[ 0 ] ) );
vertices.push( parseFloat( lineValues[ 1 ] ) );
vertices.push( parseFloat( lineValues[ 2 ] ) );
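// color components are given in the 0-255 range and normalized to [ 0, 1 ] sRGB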
const r = parseFloat( lineValues[ 3 ] ) / 255;
const g = parseFloat( lineValues[ 4 ] ) / 255;
const b = parseFloat( lineValues[ 5 ] ) / 255;
color.setRGB( r, g, b, SRGBColorSpace );
colors.push( color.r, color.g, color.b );
}
}
const geometry = new BufferGeometry();
geometry.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) );
if ( colors.length > 0 ) {
geometry.setAttribute( 'color', new Float32BufferAttribute( colors, 3 ) );
}
return geometry;
}
}
export { XYZLoader };

1217
node_modules/three/examples/jsm/loaders/lwo/IFFParser.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

414
node_modules/three/examples/jsm/loaders/lwo/LWO2Parser.js generated vendored Normal file
View File

@@ -0,0 +1,414 @@
class LWO2Parser {
constructor( IFFParser ) {
this.IFF = IFFParser;
}
parseBlock() {
this.IFF.debugger.offset = this.IFF.reader.offset;
this.IFF.debugger.closeForms();
const blockID = this.IFF.reader.getIDTag();
let length = this.IFF.reader.getUint32(); // size of data in bytes
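// Some LWO2 files store this size as a 16-bit value; if the 32-bit read would overrun
// the buffer, rewind and re-read the size as a Uint16.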
if ( length > this.IFF.reader.dv.byteLength - this.IFF.reader.offset ) {
this.IFF.reader.offset -= 4;
length = this.IFF.reader.getUint16();
}
this.IFF.debugger.dataOffset = this.IFF.reader.offset;
this.IFF.debugger.length = length;
// Data types may be found in either LWO2 OR LWO3 spec
switch ( blockID ) {
case 'FORM': // form blocks may consist of sub-chunks or sub-forms
this.IFF.parseForm( length );
break;
// SKIPPED CHUNKS
// if break is hit directly for these IDs, no node is created in the lwoTree;
// any sub-chunks and sub-forms are added to the parent form instead
// MISC skipped
case 'ICON': // Thumbnail Icon Image
case 'VMPA': // Vertex Map Parameter
case 'BBOX': // bounding box
// case 'VMMD':
// case 'VTYP':
// normal maps can be specified, normally on models imported from other applications. Currently ignored
case 'NORM':
// ENVL FORM skipped
case 'PRE ':
case 'POST':
case 'KEY ':
case 'SPAN':
// CLIP FORM skipped
case 'TIME':
case 'CLRS':
case 'CLRA':
case 'FILT':
case 'DITH':
case 'CONT':
case 'BRIT':
case 'SATR':
case 'HUE ':
case 'GAMM':
case 'NEGA':
case 'IFLT':
case 'PFLT':
// Image Map Layer skipped
case 'PROJ':
case 'AXIS':
case 'AAST':
case 'PIXB':
case 'AUVO':
case 'STCK':
// Procedural Textures skipped
case 'PROC':
case 'VALU':
case 'FUNC':
// Gradient Textures skipped
case 'PNAM':
case 'INAM':
case 'GRST':
case 'GREN':
case 'GRPT':
case 'FKEY':
case 'IKEY':
// Texture Mapping Form skipped
case 'CSYS':
// Surface CHUNKs skipped
case 'OPAQ': // top level 'opacity' checkbox
case 'CMAP': // clip map
// Surface node CHUNKS skipped
// These mainly specify the node editor setup in LW
case 'NLOC':
case 'NZOM':
case 'NVER':
case 'NSRV':
case 'NVSK': // unknown
case 'NCRD':
case 'WRPW': // image wrap w ( for cylindrical and spherical projections)
case 'WRPH': // image wrap h
case 'NMOD':
case 'NSEL':
case 'NPRW':
case 'NPLA':
case 'NODS':
case 'VERS':
case 'ENUM':
case 'TAG ':
case 'OPAC':
// Car Material CHUNKS
case 'CGMD':
case 'CGTY':
case 'CGST':
case 'CGEN':
case 'CGTS':
case 'CGTE':
case 'OSMP':
case 'OMDE':
case 'OUTR':
case 'FLAG':
case 'TRNL':
case 'GLOW':
case 'GVAL': // glow intensity
case 'SHRP':
case 'RFOP':
case 'RSAN':
case 'TROP':
case 'RBLR':
case 'TBLR':
case 'CLRH':
case 'CLRF':
case 'ADTR':
case 'LINE':
case 'ALPH':
case 'VCOL':
case 'ENAB':
this.IFF.debugger.skipped = true;
this.IFF.reader.skip( length );
break;
case 'SURF':
this.IFF.parseSurfaceLwo2( length );
break;
case 'CLIP':
this.IFF.parseClipLwo2( length );
break;
// Texture node chunks (not in spec)
case 'IPIX': // usePixelBlending
case 'IMIP': // useMipMaps
case 'IMOD': // imageBlendingMode
case 'AMOD': // unknown
case 'IINV': // imageInvertAlpha
case 'INCR': // imageInvertColor
case 'IAXS': // imageAxis ( for non-UV maps)
case 'IFOT': // imageFalloffType
case 'ITIM': // timing for animated textures
case 'IWRL':
case 'IUTI':
case 'IINX':
case 'IINY':
case 'IINZ':
case 'IREF': // possibly a VX for reused texture nodes
if ( length === 4 ) this.IFF.currentNode[ blockID ] = this.IFF.reader.getInt32();
else this.IFF.reader.skip( length );
break;
case 'OTAG':
this.IFF.parseObjectTag();
break;
case 'LAYR':
this.IFF.parseLayer( length );
break;
case 'PNTS':
this.IFF.parsePoints( length );
break;
case 'VMAP':
this.IFF.parseVertexMapping( length );
break;
case 'AUVU':
case 'AUVN':
this.IFF.reader.skip( length - 1 );
this.IFF.reader.getVariableLengthIndex(); // VX
break;
case 'POLS':
this.IFF.parsePolygonList( length );
break;
case 'TAGS':
this.IFF.parseTagStrings( length );
break;
case 'PTAG':
this.IFF.parsePolygonTagMapping( length );
break;
case 'VMAD':
this.IFF.parseVertexMapping( length, true );
break;
// Misc CHUNKS
case 'DESC': // Description Line
this.IFF.currentForm.description = this.IFF.reader.getString();
break;
case 'TEXT':
case 'CMNT':
case 'NCOM':
this.IFF.currentForm.comment = this.IFF.reader.getString();
break;
// Envelope Form
case 'NAME':
this.IFF.currentForm.channelName = this.IFF.reader.getString();
break;
// Image Map Layer
case 'WRAP':
this.IFF.currentForm.wrap = { w: this.IFF.reader.getUint16(), h: this.IFF.reader.getUint16() };
break;
case 'IMAG':
const index = this.IFF.reader.getVariableLengthIndex();
this.IFF.currentForm.imageIndex = index;
break;
// Texture Mapping Form
case 'OREF':
this.IFF.currentForm.referenceObject = this.IFF.reader.getString();
break;
case 'ROID':
this.IFF.currentForm.referenceObjectID = this.IFF.reader.getUint32();
break;
// Surface Blocks
case 'SSHN':
this.IFF.currentSurface.surfaceShaderName = this.IFF.reader.getString();
break;
case 'AOVN':
this.IFF.currentSurface.surfaceCustomAOVName = this.IFF.reader.getString();
break;
// Nodal Blocks
case 'NSTA':
this.IFF.currentForm.disabled = this.IFF.reader.getUint16();
break;
case 'NRNM':
this.IFF.currentForm.realName = this.IFF.reader.getString();
break;
case 'NNME':
this.IFF.currentForm.refName = this.IFF.reader.getString();
this.IFF.currentSurface.nodes[ this.IFF.currentForm.refName ] = this.IFF.currentForm;
break;
// Nodal Blocks : connections
case 'INME':
if ( ! this.IFF.currentForm.nodeName ) this.IFF.currentForm.nodeName = [];
this.IFF.currentForm.nodeName.push( this.IFF.reader.getString() );
break;
case 'IINN':
if ( ! this.IFF.currentForm.inputNodeName ) this.IFF.currentForm.inputNodeName = [];
this.IFF.currentForm.inputNodeName.push( this.IFF.reader.getString() );
break;
case 'IINM':
if ( ! this.IFF.currentForm.inputName ) this.IFF.currentForm.inputName = [];
this.IFF.currentForm.inputName.push( this.IFF.reader.getString() );
break;
case 'IONM':
if ( ! this.IFF.currentForm.inputOutputName ) this.IFF.currentForm.inputOutputName = [];
this.IFF.currentForm.inputOutputName.push( this.IFF.reader.getString() );
break;
case 'FNAM':
this.IFF.currentForm.fileName = this.IFF.reader.getString();
break;
case 'CHAN': // NOTE: ENVL Forms may also have CHAN chunk, however ENVL is currently ignored
if ( length === 4 ) this.IFF.currentForm.textureChannel = this.IFF.reader.getIDTag();
else this.IFF.reader.skip( length );
break;
// LWO2 Spec chunks: these are needed since the SURF FORMs are often in LWO2 format
case 'SMAN':
const maxSmoothingAngle = this.IFF.reader.getFloat32();
this.IFF.currentSurface.attributes.smooth = ( maxSmoothingAngle < 0 ) ? false : true;
break;
// LWO2: Basic Surface Parameters
case 'COLR':
this.IFF.currentSurface.attributes.Color = { value: this.IFF.reader.getFloat32Array( 3 ) };
this.IFF.reader.skip( 2 ); // VX: envelope
break;
case 'LUMI':
this.IFF.currentSurface.attributes.Luminosity = { value: this.IFF.reader.getFloat32() };
this.IFF.reader.skip( 2 );
break;
case 'SPEC':
this.IFF.currentSurface.attributes.Specular = { value: this.IFF.reader.getFloat32() };
this.IFF.reader.skip( 2 );
break;
case 'DIFF':
this.IFF.currentSurface.attributes.Diffuse = { value: this.IFF.reader.getFloat32() };
this.IFF.reader.skip( 2 );
break;
case 'REFL':
this.IFF.currentSurface.attributes.Reflection = { value: this.IFF.reader.getFloat32() };
this.IFF.reader.skip( 2 );
break;
case 'GLOS':
this.IFF.currentSurface.attributes.Glossiness = { value: this.IFF.reader.getFloat32() };
this.IFF.reader.skip( 2 );
break;
case 'TRAN':
this.IFF.currentSurface.attributes.opacity = this.IFF.reader.getFloat32();
this.IFF.reader.skip( 2 );
break;
case 'BUMP':
this.IFF.currentSurface.attributes.bumpStrength = this.IFF.reader.getFloat32();
this.IFF.reader.skip( 2 );
break;
case 'SIDE':
this.IFF.currentSurface.attributes.side = this.IFF.reader.getUint16();
break;
case 'RIMG':
this.IFF.currentSurface.attributes.reflectionMap = this.IFF.reader.getVariableLengthIndex();
break;
case 'RIND':
this.IFF.currentSurface.attributes.refractiveIndex = this.IFF.reader.getFloat32();
this.IFF.reader.skip( 2 );
break;
case 'TIMG':
this.IFF.currentSurface.attributes.refractionMap = this.IFF.reader.getVariableLengthIndex();
break;
case 'IMAP':
this.IFF.reader.skip( 2 );
break;
case 'TMAP':
this.IFF.debugger.skipped = true;
this.IFF.reader.skip( length ); // needs implementing
break;
case 'IUVI': // uv channel name
this.IFF.currentNode.UVChannel = this.IFF.reader.getString( length );
break;
case 'IUTL': // widthWrappingMode: 0 = Reset, 1 = Repeat, 2 = Mirror, 3 = Edge
this.IFF.currentNode.widthWrappingMode = this.IFF.reader.getUint32();
break;
case 'IVTL': // heightWrappingMode
this.IFF.currentNode.heightWrappingMode = this.IFF.reader.getUint32();
break;
// LWO2 USE
case 'BLOK':
// skip
break;
default:
this.IFF.parseUnknownCHUNK( blockID, length );
}
if ( blockID != 'FORM' ) {
this.IFF.debugger.node = 1;
this.IFF.debugger.nodeID = blockID;
this.IFF.debugger.log();
}
if ( this.IFF.reader.offset >= this.IFF.currentFormEnd ) {
this.IFF.currentForm = this.IFF.parentForm;
}
}
}
export { LWO2Parser };

373
node_modules/three/examples/jsm/loaders/lwo/LWO3Parser.js generated vendored Normal file
View File

@@ -0,0 +1,373 @@
class LWO3Parser {
constructor( IFFParser ) {
this.IFF = IFFParser;
}
parseBlock() {
this.IFF.debugger.offset = this.IFF.reader.offset;
this.IFF.debugger.closeForms();
const blockID = this.IFF.reader.getIDTag();
const length = this.IFF.reader.getUint32(); // size of data in bytes
this.IFF.debugger.dataOffset = this.IFF.reader.offset;
this.IFF.debugger.length = length;
// Data types may be found in either LWO2 OR LWO3 spec
switch ( blockID ) {
case 'FORM': // form blocks may consist of sub-chunks or sub-forms
this.IFF.parseForm( length );
break;
// SKIPPED CHUNKS
// MISC skipped
case 'ICON': // Thumbnail Icon Image
case 'VMPA': // Vertex Map Parameter
case 'BBOX': // bounding box
// case 'VMMD':
// case 'VTYP':
// normal maps can be specified, normally on models imported from other applications. Currently ignored
case 'NORM':
// ENVL FORM skipped
case 'PRE ': // Pre-loop behavior for the keyframe
case 'POST': // Post-loop behavior for the keyframe
case 'KEY ':
case 'SPAN':
// CLIP FORM skipped
case 'TIME':
case 'CLRS':
case 'CLRA':
case 'FILT':
case 'DITH':
case 'CONT':
case 'BRIT':
case 'SATR':
case 'HUE ':
case 'GAMM':
case 'NEGA':
case 'IFLT':
case 'PFLT':
// Image Map Layer skipped
case 'PROJ':
case 'AXIS':
case 'AAST':
case 'PIXB':
case 'STCK':
// Procedural Textures skipped
case 'VALU':
// Gradient Textures skipped
case 'PNAM':
case 'INAM':
case 'GRST':
case 'GREN':
case 'GRPT':
case 'FKEY':
case 'IKEY':
// Texture Mapping Form skipped
case 'CSYS':
// Surface CHUNKs skipped
case 'OPAQ': // top level 'opacity' checkbox
case 'CMAP': // clip map
// Surface node CHUNKS skipped
// These mainly specify the node editor setup in LW
case 'NLOC':
case 'NZOM':
case 'NVER':
case 'NSRV':
case 'NCRD':
case 'NMOD':
case 'NSEL':
case 'NPRW':
case 'NPLA':
case 'VERS':
case 'ENUM':
case 'TAG ':
// Car Material CHUNKS
case 'CGMD':
case 'CGTY':
case 'CGST':
case 'CGEN':
case 'CGTS':
case 'CGTE':
case 'OSMP':
case 'OMDE':
case 'OUTR':
case 'FLAG':
case 'TRNL':
case 'SHRP':
case 'RFOP':
case 'RSAN':
case 'TROP':
case 'RBLR':
case 'TBLR':
case 'CLRH':
case 'CLRF':
case 'ADTR':
case 'GLOW':
case 'LINE':
case 'ALPH':
case 'VCOL':
case 'ENAB':
this.IFF.debugger.skipped = true;
this.IFF.reader.skip( length );
break;
// Texture node chunks (not in spec)
case 'IPIX': // usePixelBlending
case 'IMIP': // useMipMaps
case 'IMOD': // imageBlendingMode
case 'AMOD': // unknown
case 'IINV': // imageInvertAlpha
case 'INCR': // imageInvertColor
case 'IAXS': // imageAxis ( for non-UV maps)
case 'IFOT': // imageFalloffType
case 'ITIM': // timing for animated textures
case 'IWRL':
case 'IUTI':
case 'IINX':
case 'IINY':
case 'IINZ':
case 'IREF': // possibly a VX for reused texture nodes
if ( length === 4 ) this.IFF.currentNode[ blockID ] = this.IFF.reader.getInt32();
else this.IFF.reader.skip( length );
break;
case 'OTAG':
this.IFF.parseObjectTag();
break;
case 'LAYR':
this.IFF.parseLayer( length );
break;
case 'PNTS':
this.IFF.parsePoints( length );
break;
case 'VMAP':
this.IFF.parseVertexMapping( length );
break;
case 'POLS':
this.IFF.parsePolygonList( length );
break;
case 'TAGS':
this.IFF.parseTagStrings( length );
break;
case 'PTAG':
this.IFF.parsePolygonTagMapping( length );
break;
case 'VMAD':
this.IFF.parseVertexMapping( length, true );
break;
// Misc CHUNKS
case 'DESC': // Description Line
this.IFF.currentForm.description = this.IFF.reader.getString();
break;
case 'TEXT':
case 'CMNT':
case 'NCOM':
this.IFF.currentForm.comment = this.IFF.reader.getString();
break;
// Envelope Form
case 'NAME':
this.IFF.currentForm.channelName = this.IFF.reader.getString();
break;
// Image Map Layer
case 'WRAP':
this.IFF.currentForm.wrap = { w: this.IFF.reader.getUint16(), h: this.IFF.reader.getUint16() };
break;
case 'IMAG':
const index = this.IFF.reader.getVariableLengthIndex();
this.IFF.currentForm.imageIndex = index;
break;
// Texture Mapping Form
case 'OREF':
this.IFF.currentForm.referenceObject = this.IFF.reader.getString();
break;
case 'ROID':
this.IFF.currentForm.referenceObjectID = this.IFF.reader.getUint32();
break;
// Surface Blocks
case 'SSHN':
this.IFF.currentSurface.surfaceShaderName = this.IFF.reader.getString();
break;
case 'AOVN':
this.IFF.currentSurface.surfaceCustomAOVName = this.IFF.reader.getString();
break;
// Nodal Blocks
case 'NSTA':
this.IFF.currentForm.disabled = this.IFF.reader.getUint16();
break;
case 'NRNM':
this.IFF.currentForm.realName = this.IFF.reader.getString();
break;
case 'NNME':
this.IFF.currentForm.refName = this.IFF.reader.getString();
this.IFF.currentSurface.nodes[ this.IFF.currentForm.refName ] = this.IFF.currentForm;
break;
// Nodal Blocks : connections
case 'INME':
if ( ! this.IFF.currentForm.nodeName ) this.IFF.currentForm.nodeName = [];
this.IFF.currentForm.nodeName.push( this.IFF.reader.getString() );
break;
case 'IINN':
if ( ! this.IFF.currentForm.inputNodeName ) this.IFF.currentForm.inputNodeName = [];
this.IFF.currentForm.inputNodeName.push( this.IFF.reader.getString() );
break;
case 'IINM':
if ( ! this.IFF.currentForm.inputName ) this.IFF.currentForm.inputName = [];
this.IFF.currentForm.inputName.push( this.IFF.reader.getString() );
break;
case 'IONM':
if ( ! this.IFF.currentForm.inputOutputName ) this.IFF.currentForm.inputOutputName = [];
this.IFF.currentForm.inputOutputName.push( this.IFF.reader.getString() );
break;
case 'FNAM':
this.IFF.currentForm.fileName = this.IFF.reader.getString();
break;
case 'CHAN': // NOTE: ENVL Forms may also have CHAN chunk, however ENVL is currently ignored
if ( length === 4 ) this.IFF.currentForm.textureChannel = this.IFF.reader.getIDTag();
else this.IFF.reader.skip( length );
break;
// LWO2 Spec chunks: these are needed since the SURF FORMs are often in LWO2 format
case 'SMAN':
const maxSmoothingAngle = this.IFF.reader.getFloat32();
this.IFF.currentSurface.attributes.smooth = ( maxSmoothingAngle < 0 ) ? false : true;
break;
// LWO2: Basic Surface Parameters
case 'COLR':
this.IFF.currentSurface.attributes.Color = { value: this.IFF.reader.getFloat32Array( 3 ) };
this.IFF.reader.skip( 2 ); // VX: envelope
break;
case 'LUMI':
this.IFF.currentSurface.attributes.Luminosity = { value: this.IFF.reader.getFloat32() };
this.IFF.reader.skip( 2 );
break;
case 'SPEC':
this.IFF.currentSurface.attributes.Specular = { value: this.IFF.reader.getFloat32() };
this.IFF.reader.skip( 2 );
break;
case 'DIFF':
this.IFF.currentSurface.attributes.Diffuse = { value: this.IFF.reader.getFloat32() };
this.IFF.reader.skip( 2 );
break;
case 'REFL':
this.IFF.currentSurface.attributes.Reflection = { value: this.IFF.reader.getFloat32() };
this.IFF.reader.skip( 2 );
break;
case 'GLOS':
this.IFF.currentSurface.attributes.Glossiness = { value: this.IFF.reader.getFloat32() };
this.IFF.reader.skip( 2 );
break;
case 'TRAN':
this.IFF.currentSurface.attributes.opacity = this.IFF.reader.getFloat32();
this.IFF.reader.skip( 2 );
break;
case 'BUMP':
this.IFF.currentSurface.attributes.bumpStrength = this.IFF.reader.getFloat32();
this.IFF.reader.skip( 2 );
break;
case 'SIDE':
this.IFF.currentSurface.attributes.side = this.IFF.reader.getUint16();
break;
case 'RIMG':
this.IFF.currentSurface.attributes.reflectionMap = this.IFF.reader.getVariableLengthIndex();
break;
case 'RIND':
this.IFF.currentSurface.attributes.refractiveIndex = this.IFF.reader.getFloat32();
this.IFF.reader.skip( 2 );
break;
case 'TIMG':
this.IFF.currentSurface.attributes.refractionMap = this.IFF.reader.getVariableLengthIndex();
break;
case 'IMAP':
this.IFF.currentSurface.attributes.imageMapIndex = this.IFF.reader.getUint32();
break;
case 'IUVI': // uv channel name
this.IFF.currentNode.UVChannel = this.IFF.reader.getString( length );
break;
case 'IUTL': // widthWrappingMode: 0 = Reset, 1 = Repeat, 2 = Mirror, 3 = Edge
this.IFF.currentNode.widthWrappingMode = this.IFF.reader.getUint32();
break;
case 'IVTL': // heightWrappingMode
this.IFF.currentNode.heightWrappingMode = this.IFF.reader.getUint32();
break;
default:
this.IFF.parseUnknownCHUNK( blockID, length );
}
if ( blockID != 'FORM' ) {
this.IFF.debugger.node = 1;
this.IFF.debugger.nodeID = blockID;
this.IFF.debugger.log();
}
if ( this.IFF.reader.offset >= this.IFF.currentFormEnd ) {
this.IFF.currentForm = this.IFF.parentForm;
}
}
}
export { LWO3Parser };

741
node_modules/three/examples/jsm/loaders/usd/USDAParser.js generated vendored Normal file
View File

@@ -0,0 +1,741 @@
import {
BufferAttribute,
BufferGeometry,
ClampToEdgeWrapping,
Group,
NoColorSpace,
Mesh,
MeshPhysicalMaterial,
MirroredRepeatWrapping,
RepeatWrapping,
SRGBColorSpace,
TextureLoader,
Object3D,
Vector2
} from 'three';
class USDAParser {
parseText( text ) {
const root = {};
const lines = text.split( '\n' );
let string = null;
let target = root;
const stack = [ root ];
// Parse USDA file
for ( const line of lines ) {
// console.log( line );
if ( line.includes( '=' ) ) {
const assignment = line.split( '=' );
const lhs = assignment[ 0 ].trim();
const rhs = assignment[ 1 ].trim();
if ( rhs.endsWith( '{' ) ) {
const group = {};
stack.push( group );
target[ lhs ] = group;
target = group;
} else if ( rhs.endsWith( '(' ) ) {
// see #28631
const values = rhs.slice( 0, - 1 );
target[ lhs ] = values;
const meta = {};
stack.push( meta );
target = meta;
} else {
target[ lhs ] = rhs;
}
} else if ( line.endsWith( '{' ) ) {
const group = target[ string ] || {};
stack.push( group );
target[ string ] = group;
target = group;
} else if ( line.endsWith( '}' ) ) {
stack.pop();
if ( stack.length === 0 ) continue;
target = stack[ stack.length - 1 ];
} else if ( line.endsWith( '(' ) ) {
const meta = {};
stack.push( meta );
string = line.split( '(' )[ 0 ].trim() || string;
target[ string ] = meta;
target = meta;
} else if ( line.endsWith( ')' ) ) {
stack.pop();
target = stack[ stack.length - 1 ];
} else {
string = line.trim();
}
}
return root;
}
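// Illustrative example of the intermediate structure parseText() produces: an input of
//
//   def Xform "Root"
//   {
//       matrix4d xformOp:transform = ( ... )
//   }
//
// becomes { 'def Xform "Root"': { 'matrix4d xformOp:transform': '( ... )' } }.
// Values are kept as raw strings; parse() below interprets them.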
parse( text, assets ) {
const root = this.parseText( text );
// Build scene graph
function findMeshGeometry( data ) {
if ( ! data ) return undefined;
if ( 'prepend references' in data ) {
const reference = data[ 'prepend references' ];
const parts = reference.split( '@' );
const path = parts[ 1 ].replace( /^.\//, '' );
const id = parts[ 2 ].replace( /^<\//, '' ).replace( />$/, '' );
return findGeometry( assets[ path ], id );
}
return findGeometry( data );
}
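// Searches the parsed tree for a mesh definition: when an id is given it first looks for
// the exact `def Mesh "<id>"` key, then falls back to a depth-first search for the first
// 'def Mesh' block.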
function findGeometry( data, id ) {
if ( ! data ) return undefined;
if ( id !== undefined ) {
const def = `def Mesh "${id}"`;
if ( def in data ) {
return data[ def ];
}
}
for ( const name in data ) {
const object = data[ name ];
if ( name.startsWith( 'def Mesh' ) ) {
return object;
}
if ( typeof object === 'object' ) {
const geometry = findGeometry( object );
if ( geometry ) return geometry;
}
}
}
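// Converts a 'def Mesh' block into a BufferGeometry: face indices are triangulated,
// positions/uvs/normals are de-indexed into flat attributes, and vertex normals are
// computed when the file does not author any.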
function buildGeometry( data ) {
if ( ! data ) return undefined;
const geometry = new BufferGeometry();
let indices = null;
let counts = null;
let uvs = null;
let positionsLength = - 1;
// index
if ( 'int[] faceVertexIndices' in data ) {
indices = JSON.parse( data[ 'int[] faceVertexIndices' ] );
}
// face count
if ( 'int[] faceVertexCounts' in data ) {
counts = JSON.parse( data[ 'int[] faceVertexCounts' ] );
indices = toTriangleIndices( indices, counts );
}
// position
if ( 'point3f[] points' in data ) {
const positions = JSON.parse( data[ 'point3f[] points' ].replace( /[()]*/g, '' ) );
positionsLength = positions.length;
let attribute = new BufferAttribute( new Float32Array( positions ), 3 );
if ( indices !== null ) attribute = toFlatBufferAttribute( attribute, indices );
geometry.setAttribute( 'position', attribute );
}
// uv
if ( 'float2[] primvars:st' in data ) {
data[ 'texCoord2f[] primvars:st' ] = data[ 'float2[] primvars:st' ];
}
if ( 'texCoord2f[] primvars:st' in data ) {
uvs = JSON.parse( data[ 'texCoord2f[] primvars:st' ].replace( /[()]*/g, '' ) );
let attribute = new BufferAttribute( new Float32Array( uvs ), 2 );
if ( indices !== null ) attribute = toFlatBufferAttribute( attribute, indices );
geometry.setAttribute( 'uv', attribute );
}
if ( 'int[] primvars:st:indices' in data && uvs !== null ) {
// custom uv index, overwrite uvs with new data
const attribute = new BufferAttribute( new Float32Array( uvs ), 2 );
let indices = JSON.parse( data[ 'int[] primvars:st:indices' ] );
indices = toTriangleIndices( indices, counts );
geometry.setAttribute( 'uv', toFlatBufferAttribute( attribute, indices ) );
}
// normal
if ( 'normal3f[] normals' in data ) {
const normals = JSON.parse( data[ 'normal3f[] normals' ].replace( /[()]*/g, '' ) );
let attribute = new BufferAttribute( new Float32Array( normals ), 3 );
// normals require special treatment in USD
if ( normals.length === positionsLength ) {
// raw normal and position data have equal length (e.g. as produced by USDZExporter)
if ( indices !== null ) attribute = toFlatBufferAttribute( attribute, indices );
} else {
// unequal length, normals are independent of faceVertexIndices
let indices = Array.from( Array( normals.length / 3 ).keys() ); // [ 0, 1, 2, 3 ... ]
indices = toTriangleIndices( indices, counts );
attribute = toFlatBufferAttribute( attribute, indices );
}
geometry.setAttribute( 'normal', attribute );
} else {
// compute flat vertex normals
geometry.computeVertexNormals();
}
return geometry;
}
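// Converts per-face vertex counts plus a flat vertex index list into triangle indices:
// triangles pass through, quads are split into two triangles, other face sizes are
// skipped with a warning. The stride calculation assumes a uniform vertex count per face.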
function toTriangleIndices( rawIndices, counts ) {
const indices = [];
for ( let i = 0; i < counts.length; i ++ ) {
const count = counts[ i ];
const stride = i * count;
if ( count === 3 ) {
const a = rawIndices[ stride + 0 ];
const b = rawIndices[ stride + 1 ];
const c = rawIndices[ stride + 2 ];
indices.push( a, b, c );
} else if ( count === 4 ) {
const a = rawIndices[ stride + 0 ];
const b = rawIndices[ stride + 1 ];
const c = rawIndices[ stride + 2 ];
const d = rawIndices[ stride + 3 ];
indices.push( a, b, c );
indices.push( a, c, d );
} else {
console.warn( 'THREE.USDZLoader: Face vertex count of %s unsupported.', count );
}
}
return indices;
}
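// De-indexes an attribute: expands the source values according to the given indices so
// the resulting BufferAttribute can be used without an index buffer.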
function toFlatBufferAttribute( attribute, indices ) {
const array = attribute.array;
const itemSize = attribute.itemSize;
const array2 = new array.constructor( indices.length * itemSize );
let index = 0, index2 = 0;
for ( let i = 0, l = indices.length; i < l; i ++ ) {
index = indices[ i ] * itemSize;
for ( let j = 0; j < itemSize; j ++ ) {
array2[ index2 ++ ] = array[ index ++ ];
}
}
return new BufferAttribute( array2, itemSize );
}
function findMeshMaterial( data ) {
if ( ! data ) return undefined;
if ( 'rel material:binding' in data ) {
const reference = data[ 'rel material:binding' ];
const id = reference.replace( /^<\//, '' ).replace( />$/, '' );
const parts = id.split( '/' );
return findMaterial( root, ` "${ parts[ 1 ] }"` );
}
return findMaterial( data );
}
function findMaterial( data, id = '' ) {
for ( const name in data ) {
const object = data[ name ];
if ( name.startsWith( 'def Material' + id ) ) {
return object;
}
if ( typeof object === 'object' ) {
const material = findMaterial( object, id );
if ( material ) return material;
}
}
}
function setTextureParams( map, data_value ) {
// rotation, scale and translation
if ( data_value[ 'float inputs:rotation' ] ) {
map.rotation = parseFloat( data_value[ 'float inputs:rotation' ] );
}
if ( data_value[ 'float2 inputs:scale' ] ) {
map.repeat = new Vector2().fromArray( JSON.parse( '[' + data_value[ 'float2 inputs:scale' ].replace( /[()]*/g, '' ) + ']' ) );
}
if ( data_value[ 'float2 inputs:translation' ] ) {
map.offset = new Vector2().fromArray( JSON.parse( '[' + data_value[ 'float2 inputs:translation' ].replace( /[()]*/g, '' ) + ']' ) );
}
}
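// Builds a MeshPhysicalMaterial from the UsdPreviewSurface shader inputs: constant
// inputs are parsed from their string values, '.connect' inputs are resolved to texture
// samplers via findTexture()/buildTexture(), and matching 'Transform2d_*' shaders supply
// per-map rotation, repeat and offset.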
function buildMaterial( data ) {
const material = new MeshPhysicalMaterial();
if ( data !== undefined ) {
let surface = undefined;
const surfaceConnection = data[ 'token outputs:surface.connect' ];
if ( surfaceConnection ) {
const match = /(\w+)\.output/.exec( surfaceConnection );
if ( match ) {
const surfaceName = match[ 1 ];
surface = data[ `def Shader "${surfaceName}"` ];
}
}
if ( surface !== undefined ) {
if ( 'color3f inputs:diffuseColor.connect' in surface ) {
const path = surface[ 'color3f inputs:diffuseColor.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
material.map = buildTexture( sampler );
material.map.colorSpace = SRGBColorSpace;
if ( 'def Shader "Transform2d_diffuse"' in data ) {
setTextureParams( material.map, data[ 'def Shader "Transform2d_diffuse"' ] );
}
} else if ( 'color3f inputs:diffuseColor' in surface ) {
const color = surface[ 'color3f inputs:diffuseColor' ].replace( /[()]*/g, '' );
material.color.fromArray( JSON.parse( '[' + color + ']' ) );
}
if ( 'color3f inputs:emissiveColor.connect' in surface ) {
const path = surface[ 'color3f inputs:emissiveColor.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
material.emissiveMap = buildTexture( sampler );
material.emissiveMap.colorSpace = SRGBColorSpace;
material.emissive.set( 0xffffff );
if ( 'def Shader "Transform2d_emissive"' in data ) {
setTextureParams( material.emissiveMap, data[ 'def Shader "Transform2d_emissive"' ] );
}
} else if ( 'color3f inputs:emissiveColor' in surface ) {
const color = surface[ 'color3f inputs:emissiveColor' ].replace( /[()]*/g, '' );
material.emissive.fromArray( JSON.parse( '[' + color + ']' ) );
}
if ( 'normal3f inputs:normal.connect' in surface ) {
const path = surface[ 'normal3f inputs:normal.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
material.normalMap = buildTexture( sampler );
material.normalMap.colorSpace = NoColorSpace;
if ( 'def Shader "Transform2d_normal"' in data ) {
setTextureParams( material.normalMap, data[ 'def Shader "Transform2d_normal"' ] );
}
}
if ( 'float inputs:roughness.connect' in surface ) {
const path = surface[ 'float inputs:roughness.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
material.roughness = 1.0;
material.roughnessMap = buildTexture( sampler );
material.roughnessMap.colorSpace = NoColorSpace;
if ( 'def Shader "Transform2d_roughness"' in data ) {
setTextureParams( material.roughnessMap, data[ 'def Shader "Transform2d_roughness"' ] );
}
} else if ( 'float inputs:roughness' in surface ) {
material.roughness = parseFloat( surface[ 'float inputs:roughness' ] );
}
if ( 'float inputs:metallic.connect' in surface ) {
const path = surface[ 'float inputs:metallic.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
material.metalness = 1.0;
material.metalnessMap = buildTexture( sampler );
material.metalnessMap.colorSpace = NoColorSpace;
if ( 'def Shader "Transform2d_metallic"' in data ) {
setTextureParams( material.metalnessMap, data[ 'def Shader "Transform2d_metallic"' ] );
}
} else if ( 'float inputs:metallic' in surface ) {
material.metalness = parseFloat( surface[ 'float inputs:metallic' ] );
}
if ( 'float inputs:clearcoat.connect' in surface ) {
const path = surface[ 'float inputs:clearcoat.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
material.clearcoat = 1.0;
material.clearcoatMap = buildTexture( sampler );
material.clearcoatMap.colorSpace = NoColorSpace;
if ( 'def Shader "Transform2d_clearcoat"' in data ) {
setTextureParams( material.clearcoatMap, data[ 'def Shader "Transform2d_clearcoat"' ] );
}
} else if ( 'float inputs:clearcoat' in surface ) {
material.clearcoat = parseFloat( surface[ 'float inputs:clearcoat' ] );
}
if ( 'float inputs:clearcoatRoughness.connect' in surface ) {
const path = surface[ 'float inputs:clearcoatRoughness.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
material.clearcoatRoughness = 1.0;
material.clearcoatRoughnessMap = buildTexture( sampler );
material.clearcoatRoughnessMap.colorSpace = NoColorSpace;
if ( 'def Shader "Transform2d_clearcoatRoughness"' in data ) {
setTextureParams( material.clearcoatRoughnessMap, data[ 'def Shader "Transform2d_clearcoatRoughness"' ] );
}
} else if ( 'float inputs:clearcoatRoughness' in surface ) {
material.clearcoatRoughness = parseFloat( surface[ 'float inputs:clearcoatRoughness' ] );
}
if ( 'float inputs:ior' in surface ) {
material.ior = parseFloat( surface[ 'float inputs:ior' ] );
}
if ( 'float inputs:occlusion.connect' in surface ) {
const path = surface[ 'float inputs:occlusion.connect' ];
const sampler = findTexture( root, /(\w+).output/.exec( path )[ 1 ] );
material.aoMap = buildTexture( sampler );
material.aoMap.colorSpace = NoColorSpace;
if ( 'def Shader "Transform2d_occlusion"' in data ) {
setTextureParams( material.aoMap, data[ 'def Shader "Transform2d_occlusion"' ] );
}
}
}
}
return material;
}
function findTexture( data, id ) {
for ( const name in data ) {
const object = data[ name ];
if ( name.startsWith( `def Shader "${ id }"` ) ) {
return object;
}
if ( typeof object === 'object' ) {
const texture = findTexture( object, id );
if ( texture ) return texture;
}
}
}
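// Loads the image referenced by 'asset inputs:file' through TextureLoader, resolving the
// path via the assets map, and applies the USD wrapS/wrapT tokens; returns null when the
// sampler has no file input.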
function buildTexture( data ) {
if ( 'asset inputs:file' in data ) {
const path = data[ 'asset inputs:file' ].replace( /@*/g, '' ).trim();
const loader = new TextureLoader();
const texture = loader.load( assets[ path ] );
const map = {
'"clamp"': ClampToEdgeWrapping,
'"mirror"': MirroredRepeatWrapping,
'"repeat"': RepeatWrapping
};
if ( 'token inputs:wrapS' in data ) {
texture.wrapS = map[ data[ 'token inputs:wrapS' ] ];
}
if ( 'token inputs:wrapT' in data ) {
texture.wrapT = map[ data[ 'token inputs:wrapT' ] ];
}
return texture;
}
return null;
}
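// Builds a Mesh from the resolved geometry and material (or a plain Object3D when no
// geometry is found) and applies the local 'matrix4d xformOp:transform' by decomposing
// it into position, quaternion and scale.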
function buildObject( data ) {
const geometry = buildGeometry( findMeshGeometry( data ) );
const material = buildMaterial( findMeshMaterial( data ) );
const mesh = geometry ? new Mesh( geometry, material ) : new Object3D();
if ( 'matrix4d xformOp:transform' in data ) {
const array = JSON.parse( '[' + data[ 'matrix4d xformOp:transform' ].replace( /[()]*/g, '' ) + ']' );
mesh.matrix.fromArray( array );
mesh.matrix.decompose( mesh.position, mesh.quaternion, mesh.scale );
}
return mesh;
}
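// Walks the parsed tree: 'def Scope' blocks are flattened into the current parent,
// 'def Xform' blocks become named objects that are added to the parent and recursed into.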
function buildHierarchy( data, group ) {
for ( const name in data ) {
if ( name.startsWith( 'def Scope' ) ) {
buildHierarchy( data[ name ], group );
} else if ( name.startsWith( 'def Xform' ) ) {
const mesh = buildObject( data[ name ] );
if ( /def Xform "(\w+)"/.test( name ) ) {
mesh.name = /def Xform "(\w+)"/.exec( name )[ 1 ];
}
group.add( mesh );
buildHierarchy( data[ name ], mesh );
}
}
}
function buildGroup( data ) {
const group = new Group();
buildHierarchy( data, group );
return group;
}
return buildGroup( root );
}
}
export { USDAParser };
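// Illustrative only, not part of this module: a minimal sketch of driving the parser with
// an already-fetched .usda string. The second argument maps referenced asset paths to
// URLs that buildTexture() can hand to TextureLoader; it is assumed empty here for an
// untextured file, 'model.usda' is a hypothetical path, and an existing `scene` is assumed.
//
//   const parser = new USDAParser();
//   const response = await fetch( 'model.usda' );
//   const group = parser.parse( await response.text(), {} );
//   scene.add( group );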

View File

@@ -0,0 +1,17 @@
import {
Group
} from 'three';
class USDCParser {
parse( buffer ) {
// TODO
return new Group();
}
}
export { USDCParser };