main repo

Author: Basilosaurusrex
Date: 2025-11-24 18:09:40 +01:00
Parent: b636ee5e70
Commit: f027651f9b
34146 changed files with 4436636 additions and 0 deletions


@@ -0,0 +1,311 @@
import { Color, ColorManagement, SRGBColorSpace } from 'three';
/* global DracoEncoderModule */
/**
* An exporter to compress geometry with the Draco library.
*
* [Draco]{@link https://google.github.io/draco/} is an open source library for compressing and
* decompressing 3D meshes and point clouds. Compressed geometry can be significantly smaller,
* at the cost of additional decoding time on the client device.
*
* Standalone Draco files have a `.drc` extension, and contain vertex positions,
* normals, colors, and other attributes. Draco files *do not* contain materials,
* textures, animation, or node hierarchies. To use these features, embed Draco geometry
* inside of a glTF file. A normal glTF file can be converted to a Draco-compressed glTF file
* using [glTF-Pipeline]{@link https://github.com/AnalyticalGraphicsInc/gltf-pipeline}.
*
* ```js
* const exporter = new DRACOExporter();
* const data = exporter.parse( mesh, options );
* ```
*
* @three_import import { DRACOExporter } from 'three/addons/exporters/DRACOExporter.js';
*/
class DRACOExporter {
/**
* Parses the given mesh or point cloud and generates the Draco output.
*
* @param {(Mesh|Points)} object - The mesh or point cloud to export.
* @param {DRACOExporter~Options} options - The export options.
* @return {Int8Array} The exported Draco.
*/
parse( object, options = {} ) {
options = Object.assign( {
decodeSpeed: 5,
encodeSpeed: 5,
encoderMethod: DRACOExporter.MESH_EDGEBREAKER_ENCODING,
quantization: [ 16, 8, 8, 8, 8 ],
exportUvs: true,
exportNormals: true,
exportColor: false,
}, options );
if ( DracoEncoderModule === undefined ) {
throw new Error( 'THREE.DRACOExporter: The draco_encoder library is required.' );
}
const geometry = object.geometry;
const dracoEncoder = DracoEncoderModule();
const encoder = new dracoEncoder.Encoder();
let builder;
let dracoObject;
if ( object.isMesh === true ) {
builder = new dracoEncoder.MeshBuilder();
dracoObject = new dracoEncoder.Mesh();
const vertices = geometry.getAttribute( 'position' );
builder.AddFloatAttributeToMesh( dracoObject, dracoEncoder.POSITION, vertices.count, vertices.itemSize, vertices.array );
const faces = geometry.getIndex();
if ( faces !== null ) {
builder.AddFacesToMesh( dracoObject, faces.count / 3, faces.array );
} else {
const faces = new ( vertices.count > 65535 ? Uint32Array : Uint16Array )( vertices.count );
for ( let i = 0; i < faces.length; i ++ ) {
faces[ i ] = i;
}
builder.AddFacesToMesh( dracoObject, vertices.count, faces );
}
if ( options.exportNormals === true ) {
const normals = geometry.getAttribute( 'normal' );
if ( normals !== undefined ) {
builder.AddFloatAttributeToMesh( dracoObject, dracoEncoder.NORMAL, normals.count, normals.itemSize, normals.array );
}
}
if ( options.exportUvs === true ) {
const uvs = geometry.getAttribute( 'uv' );
if ( uvs !== undefined ) {
builder.AddFloatAttributeToMesh( dracoObject, dracoEncoder.TEX_COORD, uvs.count, uvs.itemSize, uvs.array );
}
}
if ( options.exportColor === true ) {
const colors = geometry.getAttribute( 'color' );
if ( colors !== undefined ) {
const array = createVertexColorSRGBArray( colors );
builder.AddFloatAttributeToMesh( dracoObject, dracoEncoder.COLOR, colors.count, colors.itemSize, array );
}
}
} else if ( object.isPoints === true ) {
builder = new dracoEncoder.PointCloudBuilder();
dracoObject = new dracoEncoder.PointCloud();
const vertices = geometry.getAttribute( 'position' );
builder.AddFloatAttribute( dracoObject, dracoEncoder.POSITION, vertices.count, vertices.itemSize, vertices.array );
if ( options.exportColor === true ) {
const colors = geometry.getAttribute( 'color' );
if ( colors !== undefined ) {
const array = createVertexColorSRGBArray( colors );
builder.AddFloatAttribute( dracoObject, dracoEncoder.COLOR, colors.count, colors.itemSize, array );
}
}
} else {
throw new Error( 'DRACOExporter: Unsupported object type.' );
}
//Compress using draco encoder
const encodedData = new dracoEncoder.DracoInt8Array();
//Sets the desired encoding and decoding speed for the given options from 0 (slowest speed, but the best compression) to 10 (fastest, but the worst compression).
const encodeSpeed = ( options.encodeSpeed !== undefined ) ? options.encodeSpeed : 5;
const decodeSpeed = ( options.decodeSpeed !== undefined ) ? options.decodeSpeed : 5;
encoder.SetSpeedOptions( encodeSpeed, decodeSpeed );
// Sets the desired encoding method for a given geometry.
if ( options.encoderMethod !== undefined ) {
encoder.SetEncodingMethod( options.encoderMethod );
}
// Sets the quantization (number of bits used to represent) compression options for a named attribute.
// The attribute values will be quantized in a box defined by the maximum extent of the attribute values.
if ( options.quantization !== undefined ) {
for ( let i = 0; i < 5; i ++ ) {
if ( options.quantization[ i ] !== undefined ) {
encoder.SetAttributeQuantization( i, options.quantization[ i ] );
}
}
}
let length;
if ( object.isMesh === true ) {
length = encoder.EncodeMeshToDracoBuffer( dracoObject, encodedData );
} else {
length = encoder.EncodePointCloudToDracoBuffer( dracoObject, true, encodedData );
}
dracoEncoder.destroy( dracoObject );
if ( length === 0 ) {
throw new Error( 'THREE.DRACOExporter: Draco encoding failed.' );
}
//Copy encoded data to buffer.
const outputData = new Int8Array( new ArrayBuffer( length ) );
for ( let i = 0; i < length; i ++ ) {
outputData[ i ] = encodedData.GetValue( i );
}
dracoEncoder.destroy( encodedData );
dracoEncoder.destroy( encoder );
dracoEncoder.destroy( builder );
return outputData;
}
}
function createVertexColorSRGBArray( attribute ) {
// While .drc files do not specify colorspace, the only 'official' tooling
// is PLY and OBJ converters, which use sRGB. We'll assume sRGB is expected
// for .drc files, but note that Draco buffers embedded in glTF files will
// be Linear-sRGB instead.
const _color = new Color();
const count = attribute.count;
const itemSize = attribute.itemSize;
const array = new Float32Array( count * itemSize );
for ( let i = 0, il = count; i < il; i ++ ) {
_color.fromBufferAttribute( attribute, i );
ColorManagement.workingToColorSpace( _color, SRGBColorSpace );
array[ i * itemSize ] = _color.r;
array[ i * itemSize + 1 ] = _color.g;
array[ i * itemSize + 2 ] = _color.b;
if ( itemSize === 4 ) {
array[ i * itemSize + 3 ] = attribute.getW( i );
}
}
return array;
}
// Encoder methods
/**
* Edgebreaker encoding.
*
* @static
* @constant
* @type {number}
* @default 1
*/
DRACOExporter.MESH_EDGEBREAKER_ENCODING = 1;
/**
* Sequential encoding.
*
* @static
* @constant
* @type {number}
* @default 0
*/
DRACOExporter.MESH_SEQUENTIAL_ENCODING = 0;
// Geometry type
DRACOExporter.POINT_CLOUD = 0;
DRACOExporter.TRIANGULAR_MESH = 1;
// Attribute type
DRACOExporter.INVALID = - 1;
DRACOExporter.POSITION = 0;
DRACOExporter.NORMAL = 1;
DRACOExporter.COLOR = 2;
DRACOExporter.TEX_COORD = 3;
DRACOExporter.GENERIC = 4;
/**
* Export options of `DRACOExporter`.
*
* @typedef {Object} DRACOExporter~Options
* @property {number} [decodeSpeed=5] - Tunes the expected decoding speed, from 0 (slowest decoding, but the best compression) to 10 (fastest decoding, but the worst compression).
* @property {number} [encodeSpeed=5] - Tunes the encoding speed, from 0 (slowest encoding, but the best compression) to 10 (fastest encoding, but the worst compression).
* @property {number} [encoderMethod=1] - Either sequential encoding (very little compression) or Edgebreaker encoding. Edgebreaker traverses the triangles of the mesh in a deterministic, spiral-like way, which provides most of the benefits of this data format.
* @property {Array<number>} [quantization=[ 16, 8, 8, 8, 8 ]] - Indicates the precision of each type of data stored in the draco file in the order (POSITION, NORMAL, COLOR, TEX_COORD, GENERIC).
* @property {boolean} [exportUvs=true] - Whether to export UVs or not.
* @property {boolean} [exportNormals=true] - Whether to export normals or not.
* @property {boolean} [exportColor=false] - Whether to export colors or not.
**/
export { DRACOExporter };
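
A minimal usage sketch for the exporter above. It assumes the standalone `draco_encoder.js` build has already been loaded so that the global `DracoEncoderModule` exists (the exporter throws otherwise); the geometry, the output file name, and the `saveArrayBuffer` download helper are illustrative, not part of the exporter API.

```js
import * as THREE from 'three';
import { DRACOExporter } from 'three/addons/exporters/DRACOExporter.js';

// Assumes draco_encoder.js was loaded beforehand (e.g. via a <script> tag),
// otherwise parse() throws because DracoEncoderModule is undefined.
const mesh = new THREE.Mesh(
	new THREE.TorusKnotGeometry( 1, 0.3, 128, 16 ),
	new THREE.MeshStandardMaterial()
);

const exporter = new DRACOExporter();

// Returns an Int8Array with the compressed geometry; materials and textures
// are not part of the .drc payload.
const data = exporter.parse( mesh, {
	encoderMethod: DRACOExporter.MESH_EDGEBREAKER_ENCODING,
	encodeSpeed: 5,
	decodeSpeed: 5,
	quantization: [ 16, 8, 8, 8, 8 ], // bits for POSITION, NORMAL, COLOR, TEX_COORD, GENERIC
	exportNormals: true,
	exportUvs: true,
	exportColor: false
} );

// Hypothetical helper: wrap the buffer in a Blob and trigger a browser download.
function saveArrayBuffer( buffer, filename ) {

	const blob = new Blob( [ buffer ], { type: 'application/octet-stream' } );
	const link = document.createElement( 'a' );
	link.href = URL.createObjectURL( blob );
	link.download = filename;
	link.click();

}

saveArrayBuffer( data, 'torus_knot.drc' );
```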


@@ -0,0 +1,618 @@
import {
FloatType,
HalfFloatType,
RGBAFormat,
DataUtils,
} from 'three';
import * as fflate from '../libs/fflate.module.js';
const textEncoder = new TextEncoder();
const NO_COMPRESSION = 0;
const ZIPS_COMPRESSION = 2;
const ZIP_COMPRESSION = 3;
/**
* An exporter for EXR.
*
* EXR (Extended Dynamic Range) is an [open format specification]{@link https://github.com/AcademySoftwareFoundation/openexr}
* for a professional-grade image storage format used by the motion picture industry. The purpose of
* the format is to accurately and efficiently represent high-dynamic-range, scene-linear image data
* and associated metadata. The library is widely used in host application software where accuracy
* is critical, such as photorealistic rendering, texture access, image compositing, deep compositing,
* and DI (digital intermediate) work.
*
* ```js
* const exporter = new EXRExporter();
* const result = await exporter.parse( renderer, options );
* ```
*
* @three_import import { EXRExporter } from 'three/addons/exporters/EXRExporter.js';
*/
class EXRExporter {
/**
* This method has two variants.
*
* - When exporting a data texture, it receives two parameters. The texture and the exporter options.
* - When exporting a render target (e.g. a PMREM), it receives three parameters. The renderer, the
* render target and the exporter options.
*
* @async
* @param {(DataTexture|WebGPURenderer|WebGLRenderer)} arg1 - The data texture to export or a renderer.
* @param {(EXRExporter~Options|RenderTarget)} arg2 - The exporter options or a render target.
* @param {EXRExporter~Options} [arg3] - The exporter options.
* @return {Promise<Uint8Array>} A Promise that resolves with the exported EXR.
*/
async parse( arg1, arg2, arg3 ) {
if ( ! arg1 || ! ( arg1.isWebGLRenderer || arg1.isWebGPURenderer || arg1.isDataTexture ) ) {
throw Error( 'EXRExporter.parse: Unsupported first parameter, expected instance of WebGLRenderer, WebGPURenderer or DataTexture.' );
} else if ( arg1.isWebGLRenderer || arg1.isWebGPURenderer ) {
const renderer = arg1, renderTarget = arg2, options = arg3;
supportedRTT( renderTarget );
const info = buildInfoRTT( renderTarget, options ),
dataBuffer = await getPixelData( renderer, renderTarget, info ),
rawContentBuffer = reorganizeDataBuffer( dataBuffer, info ),
chunks = compressData( rawContentBuffer, info );
return fillData( chunks, info );
} else if ( arg1.isDataTexture ) {
const texture = arg1, options = arg2;
supportedDT( texture );
const info = buildInfoDT( texture, options ),
dataBuffer = texture.image.data,
rawContentBuffer = reorganizeDataBuffer( dataBuffer, info ),
chunks = compressData( rawContentBuffer, info );
return fillData( chunks, info );
}
}
}
function supportedRTT( renderTarget ) {
if ( ! renderTarget || ! renderTarget.isRenderTarget ) {
throw Error( 'EXRExporter.parse: Unsupported second parameter, expected instance of WebGLRenderTarget.' );
}
if ( renderTarget.isWebGLCubeRenderTarget || renderTarget.isWebGL3DRenderTarget || renderTarget.isWebGLArrayRenderTarget ) {
throw Error( 'EXRExporter.parse: Unsupported render target type, expected instance of WebGLRenderTarget.' );
}
if ( renderTarget.texture.type !== FloatType && renderTarget.texture.type !== HalfFloatType ) {
throw Error( 'EXRExporter.parse: Unsupported WebGLRenderTarget texture type.' );
}
if ( renderTarget.texture.format !== RGBAFormat ) {
throw Error( 'EXRExporter.parse: Unsupported WebGLRenderTarget texture format, expected RGBAFormat.' );
}
}
function supportedDT( texture ) {
if ( texture.type !== FloatType && texture.type !== HalfFloatType ) {
throw Error( 'EXRExporter.parse: Unsupported DataTexture texture type.' );
}
if ( texture.format !== RGBAFormat ) {
throw Error( 'EXRExporter.parse: Unsupported DataTexture texture format, expected RGBAFormat.' );
}
if ( ! texture.image.data ) {
throw Error( 'EXRExporter.parse: Invalid DataTexture image data.' );
}
if ( texture.type === FloatType && texture.image.data.constructor.name !== 'Float32Array' ) {
throw Error( 'EXRExporter.parse: DataTexture image data doesn\'t match type, expected \'Float32Array\'.' );
}
if ( texture.type === HalfFloatType && texture.image.data.constructor.name !== 'Uint16Array' ) {
throw Error( 'EXRExporter.parse: DataTexture image data doesn\'t match type, expected \'Uint16Array\'.' );
}
}
function buildInfoRTT( renderTarget, options = {} ) {
const compressionSizes = {
0: 1,
2: 1,
3: 16
};
const WIDTH = renderTarget.width,
HEIGHT = renderTarget.height,
TYPE = renderTarget.texture.type,
FORMAT = renderTarget.texture.format,
COMPRESSION = ( options.compression !== undefined ) ? options.compression : ZIP_COMPRESSION,
EXPORTER_TYPE = ( options.type !== undefined ) ? options.type : HalfFloatType,
OUT_TYPE = ( EXPORTER_TYPE === FloatType ) ? 2 : 1,
COMPRESSION_SIZE = compressionSizes[ COMPRESSION ],
NUM_CHANNELS = 4;
return {
width: WIDTH,
height: HEIGHT,
type: TYPE,
format: FORMAT,
compression: COMPRESSION,
blockLines: COMPRESSION_SIZE,
dataType: OUT_TYPE,
dataSize: 2 * OUT_TYPE,
numBlocks: Math.ceil( HEIGHT / COMPRESSION_SIZE ),
numInputChannels: 4,
numOutputChannels: NUM_CHANNELS,
};
}
function buildInfoDT( texture, options = {} ) {
const compressionSizes = {
0: 1,
2: 1,
3: 16
};
const WIDTH = texture.image.width,
HEIGHT = texture.image.height,
TYPE = texture.type,
FORMAT = texture.format,
COMPRESSION = ( options.compression !== undefined ) ? options.compression : ZIP_COMPRESSION,
EXPORTER_TYPE = ( options.type !== undefined ) ? options.type : HalfFloatType,
OUT_TYPE = ( EXPORTER_TYPE === FloatType ) ? 2 : 1,
COMPRESSION_SIZE = compressionSizes[ COMPRESSION ],
NUM_CHANNELS = 4;
return {
width: WIDTH,
height: HEIGHT,
type: TYPE,
format: FORMAT,
compression: COMPRESSION,
blockLines: COMPRESSION_SIZE,
dataType: OUT_TYPE,
dataSize: 2 * OUT_TYPE,
numBlocks: Math.ceil( HEIGHT / COMPRESSION_SIZE ),
numInputChannels: 4,
numOutputChannels: NUM_CHANNELS,
};
}
async function getPixelData( renderer, rtt, info ) {
let dataBuffer;
if ( renderer.isWebGLRenderer ) {
if ( info.type === FloatType ) {
dataBuffer = new Float32Array( info.width * info.height * info.numInputChannels );
} else {
dataBuffer = new Uint16Array( info.width * info.height * info.numInputChannels );
}
await renderer.readRenderTargetPixelsAsync( rtt, 0, 0, info.width, info.height, dataBuffer );
} else {
dataBuffer = await renderer.readRenderTargetPixelsAsync( rtt, 0, 0, info.width, info.height );
}
return dataBuffer;
}
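// Rearranges the interleaved RGBA input into the planar per-scanline layout EXR expects
// (channels stored alphabetically as A, B, G, R, matching the header's chlist) and flips
// the image vertically in the process.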
function reorganizeDataBuffer( inBuffer, info ) {
const w = info.width,
h = info.height,
dec = { r: 0, g: 0, b: 0, a: 0 },
offset = { value: 0 },
cOffset = ( info.numOutputChannels == 4 ) ? 1 : 0,
getValue = ( info.type == FloatType ) ? getFloat32 : getFloat16,
setValue = ( info.dataType == 1 ) ? setFloat16 : setFloat32,
outBuffer = new Uint8Array( info.width * info.height * info.numOutputChannels * info.dataSize ),
dv = new DataView( outBuffer.buffer );
for ( let y = 0; y < h; ++ y ) {
for ( let x = 0; x < w; ++ x ) {
const i = y * w * 4 + x * 4;
const r = getValue( inBuffer, i );
const g = getValue( inBuffer, i + 1 );
const b = getValue( inBuffer, i + 2 );
const a = getValue( inBuffer, i + 3 );
const line = ( h - y - 1 ) * w * ( 3 + cOffset ) * info.dataSize;
decodeLinear( dec, r, g, b, a );
offset.value = line + x * info.dataSize;
setValue( dv, dec.a, offset );
offset.value = line + ( cOffset ) * w * info.dataSize + x * info.dataSize;
setValue( dv, dec.b, offset );
offset.value = line + ( 1 + cOffset ) * w * info.dataSize + x * info.dataSize;
setValue( dv, dec.g, offset );
offset.value = line + ( 2 + cOffset ) * w * info.dataSize + x * info.dataSize;
setValue( dv, dec.r, offset );
}
}
return outBuffer;
}
function compressData( inBuffer, info ) {
let compress,
tmpBuffer,
sum = 0;
const chunks = { data: new Array(), totalSize: 0 },
size = info.width * info.numOutputChannels * info.blockLines * info.dataSize;
switch ( info.compression ) {
case 0:
compress = compressNONE;
break;
case 2:
case 3:
compress = compressZIP;
break;
}
if ( info.compression !== 0 ) {
tmpBuffer = new Uint8Array( size );
}
for ( let i = 0; i < info.numBlocks; ++ i ) {
const arr = inBuffer.subarray( size * i, size * ( i + 1 ) );
const block = compress( arr, tmpBuffer );
sum += block.length;
chunks.data.push( { dataChunk: block, size: block.length } );
}
chunks.totalSize = sum;
return chunks;
}
function compressNONE( data ) {
return data;
}
function compressZIP( data, tmpBuffer ) {
//
// Reorder the pixel data.
//
let t1 = 0,
t2 = Math.floor( ( data.length + 1 ) / 2 ),
s = 0;
const stop = data.length - 1;
while ( true ) {
if ( s > stop ) break;
tmpBuffer[ t1 ++ ] = data[ s ++ ];
if ( s > stop ) break;
tmpBuffer[ t2 ++ ] = data[ s ++ ];
}
//
// Predictor.
//
let p = tmpBuffer[ 0 ];
for ( let t = 1; t < tmpBuffer.length; t ++ ) {
const d = tmpBuffer[ t ] - p + ( 128 + 256 );
p = tmpBuffer[ t ];
tmpBuffer[ t ] = d;
}
const deflate = fflate.zlibSync( tmpBuffer );
return deflate;
}
function fillHeader( outBuffer, chunks, info ) {
const offset = { value: 0 };
const dv = new DataView( outBuffer.buffer );
setUint32( dv, 20000630, offset ); // magic
setUint32( dv, 2, offset ); // mask
// = HEADER =
setString( dv, 'compression', offset );
setString( dv, 'compression', offset );
setUint32( dv, 1, offset );
setUint8( dv, info.compression, offset );
setString( dv, 'screenWindowCenter', offset );
setString( dv, 'v2f', offset );
setUint32( dv, 8, offset );
setUint32( dv, 0, offset );
setUint32( dv, 0, offset );
setString( dv, 'screenWindowWidth', offset );
setString( dv, 'float', offset );
setUint32( dv, 4, offset );
setFloat32( dv, 1.0, offset );
setString( dv, 'pixelAspectRatio', offset );
setString( dv, 'float', offset );
setUint32( dv, 4, offset );
setFloat32( dv, 1.0, offset );
setString( dv, 'lineOrder', offset );
setString( dv, 'lineOrder', offset );
setUint32( dv, 1, offset );
setUint8( dv, 0, offset );
setString( dv, 'dataWindow', offset );
setString( dv, 'box2i', offset );
setUint32( dv, 16, offset );
setUint32( dv, 0, offset );
setUint32( dv, 0, offset );
setUint32( dv, info.width - 1, offset );
setUint32( dv, info.height - 1, offset );
setString( dv, 'displayWindow', offset );
setString( dv, 'box2i', offset );
setUint32( dv, 16, offset );
setUint32( dv, 0, offset );
setUint32( dv, 0, offset );
setUint32( dv, info.width - 1, offset );
setUint32( dv, info.height - 1, offset );
setString( dv, 'channels', offset );
setString( dv, 'chlist', offset );
setUint32( dv, info.numOutputChannels * 18 + 1, offset );
setString( dv, 'A', offset );
setUint32( dv, info.dataType, offset );
offset.value += 4;
setUint32( dv, 1, offset );
setUint32( dv, 1, offset );
setString( dv, 'B', offset );
setUint32( dv, info.dataType, offset );
offset.value += 4;
setUint32( dv, 1, offset );
setUint32( dv, 1, offset );
setString( dv, 'G', offset );
setUint32( dv, info.dataType, offset );
offset.value += 4;
setUint32( dv, 1, offset );
setUint32( dv, 1, offset );
setString( dv, 'R', offset );
setUint32( dv, info.dataType, offset );
offset.value += 4;
setUint32( dv, 1, offset );
setUint32( dv, 1, offset );
setUint8( dv, 0, offset );
// null-byte
setUint8( dv, 0, offset );
// = OFFSET TABLE =
let sum = offset.value + info.numBlocks * 8;
for ( let i = 0; i < chunks.data.length; ++ i ) {
setUint64( dv, sum, offset );
sum += chunks.data[ i ].size + 8;
}
}
function fillData( chunks, info ) {
const TableSize = info.numBlocks * 8,
HeaderSize = 259 + ( 18 * info.numOutputChannels ), // 259 + 18 * chlist
offset = { value: HeaderSize + TableSize },
outBuffer = new Uint8Array( HeaderSize + TableSize + chunks.totalSize + info.numBlocks * 8 ),
dv = new DataView( outBuffer.buffer );
fillHeader( outBuffer, chunks, info );
for ( let i = 0; i < chunks.data.length; ++ i ) {
const data = chunks.data[ i ].dataChunk;
const size = chunks.data[ i ].size;
setUint32( dv, i * info.blockLines, offset );
setUint32( dv, size, offset );
outBuffer.set( data, offset.value );
offset.value += size;
}
return outBuffer;
}
function decodeLinear( dec, r, g, b, a ) {
dec.r = r;
dec.g = g;
dec.b = b;
dec.a = a;
}
// function decodeSRGB( dec, r, g, b, a ) {
// dec.r = r > 0.04045 ? Math.pow( r * 0.9478672986 + 0.0521327014, 2.4 ) : r * 0.0773993808;
// dec.g = g > 0.04045 ? Math.pow( g * 0.9478672986 + 0.0521327014, 2.4 ) : g * 0.0773993808;
// dec.b = b > 0.04045 ? Math.pow( b * 0.9478672986 + 0.0521327014, 2.4 ) : b * 0.0773993808;
// dec.a = a;
// }
function setUint8( dv, value, offset ) {
dv.setUint8( offset.value, value );
offset.value += 1;
}
function setUint32( dv, value, offset ) {
dv.setUint32( offset.value, value, true );
offset.value += 4;
}
function setFloat16( dv, value, offset ) {
dv.setUint16( offset.value, DataUtils.toHalfFloat( value ), true );
offset.value += 2;
}
function setFloat32( dv, value, offset ) {
dv.setFloat32( offset.value, value, true );
offset.value += 4;
}
function setUint64( dv, value, offset ) {
dv.setBigUint64( offset.value, BigInt( value ), true );
offset.value += 8;
}
function setString( dv, string, offset ) {
const tmp = textEncoder.encode( string + '\0' );
for ( let i = 0; i < tmp.length; ++ i ) {
setUint8( dv, tmp[ i ], offset );
}
}
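// Decodes an IEEE 754 half-precision (binary16) value: 1 sign bit, 5 exponent bits,
// 10 fraction bits, including subnormal and Inf/NaN handling.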
function decodeFloat16( binary ) {
const exponent = ( binary & 0x7C00 ) >> 10,
fraction = binary & 0x03FF;
return ( binary >> 15 ? - 1 : 1 ) * (
exponent ?
(
exponent === 0x1F ?
fraction ? NaN : Infinity :
Math.pow( 2, exponent - 15 ) * ( 1 + fraction / 0x400 )
) :
6.103515625e-5 * ( fraction / 0x400 )
);
}
function getFloat16( arr, i ) {
return decodeFloat16( arr[ i ] );
}
function getFloat32( arr, i ) {
return arr[ i ];
}
/**
* Export options of `EXRExporter`.
*
* @typedef {Object} EXRExporter~Options
* @property {(HalfFloatType|FloatType)} [type=HalfFloatType] - Output data type.
* @property {(NO_COMPRESSION|ZIP_COMPRESSION|ZIPS_COMPRESSION)} [compression=ZIP_COMPRESSION] - The compression algorithm.
**/
export { EXRExporter, NO_COMPRESSION, ZIP_COMPRESSION, ZIPS_COMPRESSION };
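
A usage sketch for the render-target variant, assuming an existing `renderer`, `scene`, and `camera` and an async (browser) context; only `RGBAFormat` with `FloatType` or `HalfFloatType` is accepted, as `supportedRTT()` enforces above.

```js
import * as THREE from 'three';
import { EXRExporter, ZIP_COMPRESSION } from 'three/addons/exporters/EXRExporter.js';

// Render the scene into a half-float RGBA target (the default UnsignedByteType
// would be rejected by the exporter).
const renderTarget = new THREE.WebGLRenderTarget( 512, 512, {
	type: THREE.HalfFloatType,
	format: THREE.RGBAFormat
} );

renderer.setRenderTarget( renderTarget );
renderer.render( scene, camera );
renderer.setRenderTarget( null );

const exporter = new EXRExporter();

// Render-target variant: parse( renderer, renderTarget, options ) resolves
// with a Uint8Array holding the complete EXR file.
const result = await exporter.parse( renderer, renderTarget, {
	type: THREE.HalfFloatType,    // output channel type (HalfFloatType or FloatType)
	compression: ZIP_COMPRESSION  // or NO_COMPRESSION / ZIPS_COMPRESSION
} );

// The bytes can then be saved, e.g. as a Blob.
const blob = new Blob( [ result ], { type: 'image/x-exr' } );
```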

node_modules/three/examples/jsm/exporters/GLTFExporter.js (3595 lines, generated, vendored): file diff suppressed because it is too large.


@@ -0,0 +1,347 @@
import {
ColorManagement,
FloatType,
HalfFloatType,
UnsignedByteType,
RGBAFormat,
RGFormat,
RGIntegerFormat,
RedFormat,
RedIntegerFormat,
NoColorSpace,
LinearSRGBColorSpace,
SRGBColorSpace,
SRGBTransfer,
DataTexture,
REVISION,
} from 'three';
import {
createDefaultContainer,
write,
KHR_DF_CHANNEL_RGBSDA_ALPHA,
KHR_DF_CHANNEL_RGBSDA_BLUE,
KHR_DF_CHANNEL_RGBSDA_GREEN,
KHR_DF_CHANNEL_RGBSDA_RED,
KHR_DF_MODEL_RGBSDA,
KHR_DF_PRIMARIES_BT709,
KHR_DF_PRIMARIES_UNSPECIFIED,
KHR_DF_SAMPLE_DATATYPE_FLOAT,
KHR_DF_SAMPLE_DATATYPE_LINEAR,
KHR_DF_SAMPLE_DATATYPE_SIGNED,
KHR_DF_TRANSFER_LINEAR,
KHR_DF_TRANSFER_SRGB,
VK_FORMAT_R16_SFLOAT,
VK_FORMAT_R16G16_SFLOAT,
VK_FORMAT_R16G16B16A16_SFLOAT,
VK_FORMAT_R32_SFLOAT,
VK_FORMAT_R32G32_SFLOAT,
VK_FORMAT_R32G32B32A32_SFLOAT,
VK_FORMAT_R8_SRGB,
VK_FORMAT_R8_UNORM,
VK_FORMAT_R8G8_SRGB,
VK_FORMAT_R8G8_UNORM,
VK_FORMAT_R8G8B8A8_SRGB,
VK_FORMAT_R8G8B8A8_UNORM,
} from '../libs/ktx-parse.module.js';
/**
* References:
* - https://github.khronos.org/KTX-Specification/ktxspec.v2.html
* - https://registry.khronos.org/DataFormat/specs/1.3/dataformat.1.3.html
* - https://github.com/donmccurdy/KTX-Parse
*/
const VK_FORMAT_MAP = {
[ RGBAFormat ]: {
[ FloatType ]: {
[ NoColorSpace ]: VK_FORMAT_R32G32B32A32_SFLOAT,
[ LinearSRGBColorSpace ]: VK_FORMAT_R32G32B32A32_SFLOAT,
},
[ HalfFloatType ]: {
[ NoColorSpace ]: VK_FORMAT_R16G16B16A16_SFLOAT,
[ LinearSRGBColorSpace ]: VK_FORMAT_R16G16B16A16_SFLOAT,
},
[ UnsignedByteType ]: {
[ NoColorSpace ]: VK_FORMAT_R8G8B8A8_UNORM,
[ LinearSRGBColorSpace ]: VK_FORMAT_R8G8B8A8_UNORM,
[ SRGBColorSpace ]: VK_FORMAT_R8G8B8A8_SRGB,
},
},
[ RGFormat ]: {
[ FloatType ]: {
[ NoColorSpace ]: VK_FORMAT_R32G32_SFLOAT,
[ LinearSRGBColorSpace ]: VK_FORMAT_R32G32_SFLOAT,
},
[ HalfFloatType ]: {
[ NoColorSpace ]: VK_FORMAT_R16G16_SFLOAT,
[ LinearSRGBColorSpace ]: VK_FORMAT_R16G16_SFLOAT,
},
[ UnsignedByteType ]: {
[ NoColorSpace ]: VK_FORMAT_R8G8_UNORM,
[ LinearSRGBColorSpace ]: VK_FORMAT_R8G8_UNORM,
[ SRGBColorSpace ]: VK_FORMAT_R8G8_SRGB,
},
},
[ RedFormat ]: {
[ FloatType ]: {
[ NoColorSpace ]: VK_FORMAT_R32_SFLOAT,
[ LinearSRGBColorSpace ]: VK_FORMAT_R32_SFLOAT,
},
[ HalfFloatType ]: {
[ NoColorSpace ]: VK_FORMAT_R16_SFLOAT,
[ LinearSRGBColorSpace ]: VK_FORMAT_R16_SFLOAT,
},
[ UnsignedByteType ]: {
[ NoColorSpace ]: VK_FORMAT_R8_UNORM,
[ LinearSRGBColorSpace ]: VK_FORMAT_R8_UNORM,
[ SRGBColorSpace ]: VK_FORMAT_R8_SRGB,
},
},
};
const KHR_DF_CHANNEL_MAP = [
KHR_DF_CHANNEL_RGBSDA_RED,
KHR_DF_CHANNEL_RGBSDA_GREEN,
KHR_DF_CHANNEL_RGBSDA_BLUE,
KHR_DF_CHANNEL_RGBSDA_ALPHA,
];
// TODO: sampleLower and sampleUpper may change based on color space.
const KHR_DF_CHANNEL_SAMPLE_LOWER_UPPER = {
[ FloatType ]: [ 0xbf800000, 0x3f800000 ],
[ HalfFloatType ]: [ 0xbf800000, 0x3f800000 ],
[ UnsignedByteType ]: [ 0, 255 ],
};
const ERROR_INPUT = 'THREE.KTX2Exporter: Supported inputs are DataTexture, Data3DTexture, or WebGLRenderer and WebGLRenderTarget.';
const ERROR_FORMAT = 'THREE.KTX2Exporter: Supported formats are RGBAFormat, RGFormat, or RedFormat.';
const ERROR_TYPE = 'THREE.KTX2Exporter: Supported types are FloatType, HalfFloatType, or UnsignedByteType.';
const ERROR_COLOR_SPACE = 'THREE.KTX2Exporter: Supported color spaces are SRGBColorSpace (UnsignedByteType only), LinearSRGBColorSpace, or NoColorSpace.';
/**
* An exporter for KTX2.
*
* ```js
* const exporter = new KTX2Exporter();
* const result = await exporter.parse( dataTexture );
* ```
*
* @three_import import { KTX2Exporter } from 'three/addons/exporters/KTX2Exporter.js';
*/
export class KTX2Exporter {
/**
* This method has two variants.
*
* - When exporting a data texture, it receives one parameter. The data or 3D data texture.
* - When exporting a render target (e.g. a PMREM), it receives two parameters. The renderer and the
* render target.
*
* @async
* @param {(DataTexture|Data3DTexture|WebGPURenderer|WebGLRenderer)} arg1 - The data texture to export or a renderer.
* @param {RenderTarget} [arg2] - The render target that should be exported
* @return {Promise<Uint8Array>} A Promise that resolves with the exported KTX2.
*/
async parse( arg1, arg2 ) {
let texture;
if ( arg1.isDataTexture || arg1.isData3DTexture ) {
texture = arg1;
} else if ( ( arg1.isWebGLRenderer || arg1.isWebGPURenderer ) && arg2.isRenderTarget ) {
texture = await toDataTexture( arg1, arg2 );
} else {
throw new Error( ERROR_INPUT );
}
if ( VK_FORMAT_MAP[ texture.format ] === undefined ) {
throw new Error( ERROR_FORMAT );
}
if ( VK_FORMAT_MAP[ texture.format ][ texture.type ] === undefined ) {
throw new Error( ERROR_TYPE );
}
if ( VK_FORMAT_MAP[ texture.format ][ texture.type ][ texture.colorSpace ] === undefined ) {
throw new Error( ERROR_COLOR_SPACE );
}
//
const array = texture.image.data;
const channelCount = getChannelCount( texture );
const container = createDefaultContainer();
container.vkFormat = VK_FORMAT_MAP[ texture.format ][ texture.type ][ texture.colorSpace ];
container.typeSize = array.BYTES_PER_ELEMENT;
container.pixelWidth = texture.image.width;
container.pixelHeight = texture.image.height;
if ( texture.isData3DTexture ) {
container.pixelDepth = texture.image.depth;
}
//
const basicDesc = container.dataFormatDescriptor[ 0 ];
basicDesc.colorModel = KHR_DF_MODEL_RGBSDA;
basicDesc.colorPrimaries = texture.colorSpace === NoColorSpace
? KHR_DF_PRIMARIES_UNSPECIFIED
: KHR_DF_PRIMARIES_BT709;
basicDesc.transferFunction = ColorManagement.getTransfer( texture.colorSpace ) === SRGBTransfer
? KHR_DF_TRANSFER_SRGB
: KHR_DF_TRANSFER_LINEAR;
basicDesc.texelBlockDimension = [ 0, 0, 0, 0 ];
basicDesc.bytesPlane = [
container.typeSize * channelCount, 0, 0, 0, 0, 0, 0, 0,
];
for ( let i = 0; i < channelCount; ++ i ) {
let channelType = KHR_DF_CHANNEL_MAP[ i ];
// Assign KHR_DF_SAMPLE_DATATYPE_LINEAR if the channel is linear _and_ differs from the transfer function.
if ( channelType === KHR_DF_CHANNEL_RGBSDA_ALPHA && basicDesc.transferFunction !== KHR_DF_TRANSFER_LINEAR ) {
channelType |= KHR_DF_SAMPLE_DATATYPE_LINEAR;
}
if ( texture.type === FloatType || texture.type === HalfFloatType ) {
channelType |= KHR_DF_SAMPLE_DATATYPE_FLOAT;
channelType |= KHR_DF_SAMPLE_DATATYPE_SIGNED;
}
basicDesc.samples.push( {
channelType: channelType,
bitOffset: i * array.BYTES_PER_ELEMENT * 8,
bitLength: array.BYTES_PER_ELEMENT * 8 - 1,
samplePosition: [ 0, 0, 0, 0 ],
sampleLower: KHR_DF_CHANNEL_SAMPLE_LOWER_UPPER[ texture.type ][ 0 ],
sampleUpper: KHR_DF_CHANNEL_SAMPLE_LOWER_UPPER[ texture.type ][ 1 ],
} );
}
//
container.levelCount = 1;
container.levels = [ {
levelData: new Uint8Array( array.buffer, array.byteOffset, array.byteLength ),
uncompressedByteLength: array.byteLength,
} ];
//
container.keyValue[ 'KTXwriter' ] = `three.js ${ REVISION }`;
//
return write( container, { keepWriter: true } );
}
}
async function toDataTexture( renderer, rtt ) {
const channelCount = getChannelCount( rtt.texture );
let view;
if ( renderer.isWebGLRenderer ) {
if ( rtt.texture.type === FloatType ) {
view = new Float32Array( rtt.width * rtt.height * channelCount );
} else if ( rtt.texture.type === HalfFloatType ) {
view = new Uint16Array( rtt.width * rtt.height * channelCount );
} else if ( rtt.texture.type === UnsignedByteType ) {
view = new Uint8Array( rtt.width * rtt.height * channelCount );
} else {
throw new Error( ERROR_TYPE );
}
await renderer.readRenderTargetPixelsAsync( rtt, 0, 0, rtt.width, rtt.height, view );
} else {
view = await renderer.readRenderTargetPixelsAsync( rtt, 0, 0, rtt.width, rtt.height );
}
const texture = new DataTexture( view, rtt.width, rtt.height, rtt.texture.format, rtt.texture.type );
texture.colorSpace = rtt.texture.colorSpace;
return texture;
}
function getChannelCount( texture ) {
switch ( texture.format ) {
case RGBAFormat:
return 4;
case RGFormat:
case RGIntegerFormat:
return 2;
case RedFormat:
case RedIntegerFormat:
return 1;
default:
throw new Error( ERROR_FORMAT );
}
}
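
A small sketch of the data-texture variant of the exporter above, assuming the bundled `ktx-parse` helper (imported from `../libs/ktx-parse.module.js`) is available and an async context; the texture contents here are arbitrary test data.

```js
import * as THREE from 'three';
import { KTX2Exporter } from 'three/addons/exporters/KTX2Exporter.js';

// Build a tiny RGBA float texture; RGBAFormat/RGFormat/RedFormat combined with
// FloatType, HalfFloatType or UnsignedByteType are the supported inputs.
const size = 4;
const data = new Float32Array( size * size * 4 );

for ( let i = 0; i < data.length; i += 4 ) {

	data[ i + 0 ] = Math.random(); // R
	data[ i + 1 ] = Math.random(); // G
	data[ i + 2 ] = Math.random(); // B
	data[ i + 3 ] = 1.0;           // A

}

const texture = new THREE.DataTexture( data, size, size, THREE.RGBAFormat, THREE.FloatType );
texture.colorSpace = THREE.LinearSRGBColorSpace;
texture.needsUpdate = true;

const exporter = new KTX2Exporter();

// Resolves with a Uint8Array containing a single-level, uncompressed KTX2 container.
const ktx2 = await exporter.parse( texture );
```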


@@ -0,0 +1,308 @@
import {
Color,
ColorManagement,
Matrix3,
SRGBColorSpace,
Vector2,
Vector3
} from 'three';
/**
* An exporter for OBJ.
*
* `OBJExporter` is not able to export material data into MTL files, so only geometry data are supported.
*
* ```js
* const exporter = new OBJExporter();
* const data = exporter.parse( scene );
* ```
*
* @three_import import { OBJExporter } from 'three/addons/exporters/OBJExporter.js';
*/
class OBJExporter {
/**
* Parses the given 3D object and generates the OBJ output.
*
* If the 3D object is composed of multiple children and geometry, they are merged into a single mesh in the file.
*
* @param {Object3D} object - The 3D object to export.
* @return {string} The exported OBJ.
*/
parse( object ) {
let output = '';
let indexVertex = 0;
let indexVertexUvs = 0;
let indexNormals = 0;
const vertex = new Vector3();
const color = new Color();
const normal = new Vector3();
const uv = new Vector2();
const face = [];
function parseMesh( mesh ) {
let nbVertex = 0;
let nbNormals = 0;
let nbVertexUvs = 0;
const geometry = mesh.geometry;
const normalMatrixWorld = new Matrix3();
// shortcuts
const vertices = geometry.getAttribute( 'position' );
const normals = geometry.getAttribute( 'normal' );
const uvs = geometry.getAttribute( 'uv' );
const indices = geometry.getIndex();
// name of the mesh object
output += 'o ' + mesh.name + '\n';
// name of the mesh material
if ( mesh.material && mesh.material.name ) {
output += 'usemtl ' + mesh.material.name + '\n';
}
// vertices
if ( vertices !== undefined ) {
for ( let i = 0, l = vertices.count; i < l; i ++, nbVertex ++ ) {
vertex.fromBufferAttribute( vertices, i );
// transform the vertex to world space
vertex.applyMatrix4( mesh.matrixWorld );
// transform the vertex to export format
output += 'v ' + vertex.x + ' ' + vertex.y + ' ' + vertex.z + '\n';
}
}
// uvs
if ( uvs !== undefined ) {
for ( let i = 0, l = uvs.count; i < l; i ++, nbVertexUvs ++ ) {
uv.fromBufferAttribute( uvs, i );
// transform the uv to export format
output += 'vt ' + uv.x + ' ' + uv.y + '\n';
}
}
// normals
if ( normals !== undefined ) {
normalMatrixWorld.getNormalMatrix( mesh.matrixWorld );
for ( let i = 0, l = normals.count; i < l; i ++, nbNormals ++ ) {
normal.fromBufferAttribute( normals, i );
// transform the normal to world space
normal.applyMatrix3( normalMatrixWorld ).normalize();
// transform the normal to export format
output += 'vn ' + normal.x + ' ' + normal.y + ' ' + normal.z + '\n';
}
}
// faces
if ( indices !== null ) {
for ( let i = 0, l = indices.count; i < l; i += 3 ) {
for ( let m = 0; m < 3; m ++ ) {
const j = indices.getX( i + m ) + 1;
face[ m ] = ( indexVertex + j ) + ( normals || uvs ? '/' + ( uvs ? ( indexVertexUvs + j ) : '' ) + ( normals ? '/' + ( indexNormals + j ) : '' ) : '' );
}
// transform the face to export format
output += 'f ' + face.join( ' ' ) + '\n';
}
} else {
for ( let i = 0, l = vertices.count; i < l; i += 3 ) {
for ( let m = 0; m < 3; m ++ ) {
const j = i + m + 1;
face[ m ] = ( indexVertex + j ) + ( normals || uvs ? '/' + ( uvs ? ( indexVertexUvs + j ) : '' ) + ( normals ? '/' + ( indexNormals + j ) : '' ) : '' );
}
// transform the face to export format
output += 'f ' + face.join( ' ' ) + '\n';
}
}
// update index
indexVertex += nbVertex;
indexVertexUvs += nbVertexUvs;
indexNormals += nbNormals;
}
function parseLine( line ) {
let nbVertex = 0;
const geometry = line.geometry;
const type = line.type;
// shortcuts
const vertices = geometry.getAttribute( 'position' );
// name of the line object
output += 'o ' + line.name + '\n';
if ( vertices !== undefined ) {
for ( let i = 0, l = vertices.count; i < l; i ++, nbVertex ++ ) {
vertex.fromBufferAttribute( vertices, i );
// transform the vertex to world space
vertex.applyMatrix4( line.matrixWorld );
// transform the vertex to export format
output += 'v ' + vertex.x + ' ' + vertex.y + ' ' + vertex.z + '\n';
}
}
if ( type === 'Line' ) {
output += 'l ';
for ( let j = 1, l = vertices.count; j <= l; j ++ ) {
output += ( indexVertex + j ) + ' ';
}
output += '\n';
}
if ( type === 'LineSegments' ) {
for ( let j = 1, k = j + 1, l = vertices.count; j < l; j += 2, k = j + 1 ) {
output += 'l ' + ( indexVertex + j ) + ' ' + ( indexVertex + k ) + '\n';
}
}
// update index
indexVertex += nbVertex;
}
function parsePoints( points ) {
let nbVertex = 0;
const geometry = points.geometry;
const vertices = geometry.getAttribute( 'position' );
const colors = geometry.getAttribute( 'color' );
output += 'o ' + points.name + '\n';
if ( vertices !== undefined ) {
for ( let i = 0, l = vertices.count; i < l; i ++, nbVertex ++ ) {
vertex.fromBufferAttribute( vertices, i );
vertex.applyMatrix4( points.matrixWorld );
output += 'v ' + vertex.x + ' ' + vertex.y + ' ' + vertex.z;
if ( colors !== undefined ) {
color.fromBufferAttribute( colors, i );
ColorManagement.workingToColorSpace( color, SRGBColorSpace );
output += ' ' + color.r + ' ' + color.g + ' ' + color.b;
}
output += '\n';
}
output += 'p ';
for ( let j = 1, l = vertices.count; j <= l; j ++ ) {
output += ( indexVertex + j ) + ' ';
}
output += '\n';
}
// update index
indexVertex += nbVertex;
}
object.traverse( function ( child ) {
if ( child.isMesh === true ) {
parseMesh( child );
}
if ( child.isLine === true ) {
parseLine( child );
}
if ( child.isPoints === true ) {
parsePoints( child );
}
} );
return output;
}
}
export { OBJExporter };
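
A short usage sketch; the scene contents are illustrative. Everything the traversal finds (meshes, lines, points) is written in world space, and no MTL file is produced.

```js
import * as THREE from 'three';
import { OBJExporter } from 'three/addons/exporters/OBJExporter.js';

const scene = new THREE.Scene();

const box = new THREE.Mesh(
	new THREE.BoxGeometry( 1, 1, 1 ),
	new THREE.MeshStandardMaterial( { name: 'box_material' } ) // emitted as a 'usemtl' line
);
box.position.set( 0, 0.5, 0 );
scene.add( box );

// Object transforms are baked into the output, so make sure world matrices are current.
scene.updateMatrixWorld( true );

const exporter = new OBJExporter();
const objText = exporter.parse( scene ); // plain OBJ text as a string

// The string can be saved directly, e.g. as a text Blob.
const blob = new Blob( [ objText ], { type: 'text/plain' } );
```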


@@ -0,0 +1,562 @@
import {
Matrix3,
Vector3,
Color,
ColorManagement,
SRGBColorSpace
} from 'three';
/**
* An exporter for PLY.
*
* PLY (Polygon File Format, also known as the Stanford Triangle Format) is a file format for
* efficient delivery and loading of simple, static 3D content in a dense format. Both binary and
* ASCII output is supported. PLY can store vertex positions, colors, normals and UV coordinates.
* No textures or texture references are saved.
*
* ```js
* const exporter = new PLYExporter();
* const data = exporter.parse( scene, options );
* ```
*
* @three_import import { PLYExporter } from 'three/addons/exporters/PLYExporter.js';
*/
class PLYExporter {
/**
* Parses the given 3D object and generates the PLY output.
*
* If the 3D object is composed of multiple children and geometry, they are merged into a single mesh in the file.
*
* @param {Object3D} object - The 3D object to export.
* @param {PLYExporter~OnDone} onDone - A callback function that is executed when the export has finished.
* @param {PLYExporter~Options} options - The export options.
* @return {?(string|ArrayBuffer)} The exported PLY.
*/
parse( object, onDone, options = {} ) {
// reference https://github.com/gkjohnson/ply-exporter-js
// Iterate over the valid meshes in the object
function traverseMeshes( cb ) {
object.traverse( function ( child ) {
if ( child.isMesh === true || child.isPoints ) {
const mesh = child;
const geometry = mesh.geometry;
if ( geometry.hasAttribute( 'position' ) === true ) {
cb( mesh, geometry );
}
}
} );
}
// Default options
const defaultOptions = {
binary: false,
excludeAttributes: [], // normal, uv, color, index
littleEndian: false
};
options = Object.assign( defaultOptions, options );
const excludeAttributes = options.excludeAttributes;
let includeIndices = true;
let includeNormals = false;
let includeColors = false;
let includeUVs = false;
// count the vertices, check which properties are used,
// and cache the BufferGeometry
let vertexCount = 0;
let faceCount = 0;
object.traverse( function ( child ) {
if ( child.isMesh === true ) {
const mesh = child;
const geometry = mesh.geometry;
const vertices = geometry.getAttribute( 'position' );
const normals = geometry.getAttribute( 'normal' );
const uvs = geometry.getAttribute( 'uv' );
const colors = geometry.getAttribute( 'color' );
const indices = geometry.getIndex();
if ( vertices === undefined ) {
return;
}
vertexCount += vertices.count;
faceCount += indices ? indices.count / 3 : vertices.count / 3;
if ( normals !== undefined ) includeNormals = true;
if ( uvs !== undefined ) includeUVs = true;
if ( colors !== undefined ) includeColors = true;
} else if ( child.isPoints ) {
const mesh = child;
const geometry = mesh.geometry;
const vertices = geometry.getAttribute( 'position' );
const normals = geometry.getAttribute( 'normal' );
const colors = geometry.getAttribute( 'color' );
vertexCount += vertices.count;
if ( normals !== undefined ) includeNormals = true;
if ( colors !== undefined ) includeColors = true;
includeIndices = false;
}
} );
const tempColor = new Color();
includeIndices = includeIndices && excludeAttributes.indexOf( 'index' ) === - 1;
includeNormals = includeNormals && excludeAttributes.indexOf( 'normal' ) === - 1;
includeColors = includeColors && excludeAttributes.indexOf( 'color' ) === - 1;
includeUVs = includeUVs && excludeAttributes.indexOf( 'uv' ) === - 1;
if ( includeIndices && faceCount !== Math.floor( faceCount ) ) {
// point cloud meshes will not have an index array and may not have a
// number of vertices that is divisible by 3 (and therefore representable
// as triangles)
console.error(
'PLYExporter: Failed to generate a valid PLY file with triangle indices because the ' +
'number of indices is not divisible by 3.'
);
return null;
}
const indexByteCount = 4;
let header =
'ply\n' +
`format ${ options.binary ? ( options.littleEndian ? 'binary_little_endian' : 'binary_big_endian' ) : 'ascii' } 1.0\n` +
`element vertex ${vertexCount}\n` +
// position
'property float x\n' +
'property float y\n' +
'property float z\n';
if ( includeNormals === true ) {
// normal
header +=
'property float nx\n' +
'property float ny\n' +
'property float nz\n';
}
if ( includeUVs === true ) {
// uvs
header +=
'property float s\n' +
'property float t\n';
}
if ( includeColors === true ) {
// colors
header +=
'property uchar red\n' +
'property uchar green\n' +
'property uchar blue\n';
}
if ( includeIndices === true ) {
// faces
header +=
`element face ${faceCount}\n` +
'property list uchar int vertex_index\n';
}
header += 'end_header\n';
// Generate attribute data
const vertex = new Vector3();
const normalMatrixWorld = new Matrix3();
let result = null;
if ( options.binary === true ) {
// Binary File Generation
const headerBin = new TextEncoder().encode( header );
// 3 position values at 4 bytes
// 3 normal values at 4 bytes
// 3 color channels with 1 byte
// 2 uv values at 4 bytes
const vertexListLength = vertexCount * ( 4 * 3 + ( includeNormals ? 4 * 3 : 0 ) + ( includeColors ? 3 : 0 ) + ( includeUVs ? 4 * 2 : 0 ) );
// 1 byte shape descriptor
// 3 vertex indices at ${indexByteCount} bytes
const faceListLength = includeIndices ? faceCount * ( indexByteCount * 3 + 1 ) : 0;
const output = new DataView( new ArrayBuffer( headerBin.length + vertexListLength + faceListLength ) );
new Uint8Array( output.buffer ).set( headerBin, 0 );
let vOffset = headerBin.length;
let fOffset = headerBin.length + vertexListLength;
let writtenVertices = 0;
traverseMeshes( function ( mesh, geometry ) {
const vertices = geometry.getAttribute( 'position' );
const normals = geometry.getAttribute( 'normal' );
const uvs = geometry.getAttribute( 'uv' );
const colors = geometry.getAttribute( 'color' );
const indices = geometry.getIndex();
normalMatrixWorld.getNormalMatrix( mesh.matrixWorld );
for ( let i = 0, l = vertices.count; i < l; i ++ ) {
vertex.fromBufferAttribute( vertices, i );
vertex.applyMatrix4( mesh.matrixWorld );
// Position information
output.setFloat32( vOffset, vertex.x, options.littleEndian );
vOffset += 4;
output.setFloat32( vOffset, vertex.y, options.littleEndian );
vOffset += 4;
output.setFloat32( vOffset, vertex.z, options.littleEndian );
vOffset += 4;
// Normal information
if ( includeNormals === true ) {
if ( normals != null ) {
vertex.fromBufferAttribute( normals, i );
vertex.applyMatrix3( normalMatrixWorld ).normalize();
output.setFloat32( vOffset, vertex.x, options.littleEndian );
vOffset += 4;
output.setFloat32( vOffset, vertex.y, options.littleEndian );
vOffset += 4;
output.setFloat32( vOffset, vertex.z, options.littleEndian );
vOffset += 4;
} else {
output.setFloat32( vOffset, 0, options.littleEndian );
vOffset += 4;
output.setFloat32( vOffset, 0, options.littleEndian );
vOffset += 4;
output.setFloat32( vOffset, 0, options.littleEndian );
vOffset += 4;
}
}
// UV information
if ( includeUVs === true ) {
if ( uvs != null ) {
output.setFloat32( vOffset, uvs.getX( i ), options.littleEndian );
vOffset += 4;
output.setFloat32( vOffset, uvs.getY( i ), options.littleEndian );
vOffset += 4;
} else {
output.setFloat32( vOffset, 0, options.littleEndian );
vOffset += 4;
output.setFloat32( vOffset, 0, options.littleEndian );
vOffset += 4;
}
}
// Color information
if ( includeColors === true ) {
if ( colors != null ) {
tempColor.fromBufferAttribute( colors, i );
ColorManagement.workingToColorSpace( tempColor, SRGBColorSpace );
output.setUint8( vOffset, Math.floor( tempColor.r * 255 ) );
vOffset += 1;
output.setUint8( vOffset, Math.floor( tempColor.g * 255 ) );
vOffset += 1;
output.setUint8( vOffset, Math.floor( tempColor.b * 255 ) );
vOffset += 1;
} else {
output.setUint8( vOffset, 255 );
vOffset += 1;
output.setUint8( vOffset, 255 );
vOffset += 1;
output.setUint8( vOffset, 255 );
vOffset += 1;
}
}
}
if ( includeIndices === true ) {
// Create the face list
if ( indices !== null ) {
for ( let i = 0, l = indices.count; i < l; i += 3 ) {
output.setUint8( fOffset, 3 );
fOffset += 1;
output.setUint32( fOffset, indices.getX( i + 0 ) + writtenVertices, options.littleEndian );
fOffset += indexByteCount;
output.setUint32( fOffset, indices.getX( i + 1 ) + writtenVertices, options.littleEndian );
fOffset += indexByteCount;
output.setUint32( fOffset, indices.getX( i + 2 ) + writtenVertices, options.littleEndian );
fOffset += indexByteCount;
}
} else {
for ( let i = 0, l = vertices.count; i < l; i += 3 ) {
output.setUint8( fOffset, 3 );
fOffset += 1;
output.setUint32( fOffset, writtenVertices + i, options.littleEndian );
fOffset += indexByteCount;
output.setUint32( fOffset, writtenVertices + i + 1, options.littleEndian );
fOffset += indexByteCount;
output.setUint32( fOffset, writtenVertices + i + 2, options.littleEndian );
fOffset += indexByteCount;
}
}
}
// Save the amount of verts we've already written so we can offset
// the face index on the next mesh
writtenVertices += vertices.count;
} );
result = output.buffer;
} else {
// Ascii File Generation
// count the number of vertices
let writtenVertices = 0;
let vertexList = '';
let faceList = '';
traverseMeshes( function ( mesh, geometry ) {
const vertices = geometry.getAttribute( 'position' );
const normals = geometry.getAttribute( 'normal' );
const uvs = geometry.getAttribute( 'uv' );
const colors = geometry.getAttribute( 'color' );
const indices = geometry.getIndex();
normalMatrixWorld.getNormalMatrix( mesh.matrixWorld );
// form each line
for ( let i = 0, l = vertices.count; i < l; i ++ ) {
vertex.fromBufferAttribute( vertices, i );
vertex.applyMatrix4( mesh.matrixWorld );
// Position information
let line =
vertex.x + ' ' +
vertex.y + ' ' +
vertex.z;
// Normal information
if ( includeNormals === true ) {
if ( normals != null ) {
vertex.fromBufferAttribute( normals, i );
vertex.applyMatrix3( normalMatrixWorld ).normalize();
line += ' ' +
vertex.x + ' ' +
vertex.y + ' ' +
vertex.z;
} else {
line += ' 0 0 0';
}
}
// UV information
if ( includeUVs === true ) {
if ( uvs != null ) {
line += ' ' +
uvs.getX( i ) + ' ' +
uvs.getY( i );
} else {
line += ' 0 0';
}
}
// Color information
if ( includeColors === true ) {
if ( colors != null ) {
tempColor.fromBufferAttribute( colors, i );
ColorManagement.workingToColorSpace( tempColor, SRGBColorSpace );
line += ' ' +
Math.floor( tempColor.r * 255 ) + ' ' +
Math.floor( tempColor.g * 255 ) + ' ' +
Math.floor( tempColor.b * 255 );
} else {
line += ' 255 255 255';
}
}
vertexList += line + '\n';
}
// Create the face list
if ( includeIndices === true ) {
if ( indices !== null ) {
for ( let i = 0, l = indices.count; i < l; i += 3 ) {
faceList += `3 ${ indices.getX( i + 0 ) + writtenVertices }`;
faceList += ` ${ indices.getX( i + 1 ) + writtenVertices }`;
faceList += ` ${ indices.getX( i + 2 ) + writtenVertices }\n`;
}
} else {
for ( let i = 0, l = vertices.count; i < l; i += 3 ) {
faceList += `3 ${ writtenVertices + i } ${ writtenVertices + i + 1 } ${ writtenVertices + i + 2 }\n`;
}
}
faceCount += indices ? indices.count / 3 : vertices.count / 3;
}
writtenVertices += vertices.count;
} );
result = `${ header }${vertexList}${ includeIndices ? `${faceList}\n` : '\n' }`;
}
if ( typeof onDone === 'function' ) requestAnimationFrame( () => onDone( result ) );
return result;
}
}
/**
* Export options of `PLYExporter`.
*
* @typedef {Object} PLYExporter~Options
* @property {boolean} [binary=false] - Whether to export in binary format or ASCII.
* @property {Array<string>} [excludeAttributes] - Which properties to explicitly exclude from
* the exported PLY file. Valid values are `'color'`, `'normal'`, `'uv'`, and `'index'`. If triangle
* indices are excluded, then a point cloud is exported.
* @property {boolean} [littleEndian=false] - Whether the binary export uses little or big endian.
**/
/**
* onDone callback of `PLYExporter`.
*
* @callback PLYExporter~OnDone
* @param {string|ArrayBuffer} result - The generated PLY ascii or binary.
*/
export { PLYExporter };
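
A usage sketch covering both output modes of the exporter above; the geometry and the logging callback are illustrative, and the callback form relies on `requestAnimationFrame`, so a browser environment is assumed.

```js
import * as THREE from 'three';
import { PLYExporter } from 'three/addons/exporters/PLYExporter.js';

const mesh = new THREE.Mesh(
	new THREE.SphereGeometry( 1, 32, 16 ),
	new THREE.MeshStandardMaterial()
);
mesh.updateMatrixWorld( true );

const exporter = new PLYExporter();

// ASCII export, skipping UVs; the result is a string (or null on failure).
const asciiPly = exporter.parse( mesh, null, { excludeAttributes: [ 'uv' ] } );

// Binary little-endian export; the result is an ArrayBuffer. The optional
// callback receives the same result on the next animation frame.
const binaryPly = exporter.parse( mesh, ( result ) => {

	console.log( 'PLY export finished:', result.byteLength, 'bytes' );

}, { binary: true, littleEndian: true } );
```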


@@ -0,0 +1,221 @@
import { Vector3 } from 'three';
/**
* An exporter for STL.
*
* STL files describe only the surface geometry of a three-dimensional object without
* any representation of color, texture or other common model attributes. The STL format
* specifies both ASCII and binary representations, with binary being more compact.
* STL files contain no scale information or indexes, and the units are arbitrary.
*
* ```js
* const exporter = new STLExporter();
* const data = exporter.parse( mesh, { binary: true } );
* ```
*
* @three_import import { STLExporter } from 'three/addons/exporters/STLExporter.js';
*/
class STLExporter {
/**
* Parses the given 3D object and generates the STL output.
*
* If the 3D object is composed of multiple children and geometry, they are merged into a single mesh in the file.
*
* @param {Object3D} scene - A scene, mesh or any other 3D object containing meshes to encode.
* @param {STLExporter~Options} options - The export options.
* @return {string|ArrayBuffer} The exported STL.
*/
parse( scene, options = {} ) {
options = Object.assign( {
binary: false
}, options );
const binary = options.binary;
//
const objects = [];
let triangles = 0;
scene.traverse( function ( object ) {
if ( object.isMesh ) {
const geometry = object.geometry;
const index = geometry.index;
const positionAttribute = geometry.getAttribute( 'position' );
triangles += ( index !== null ) ? ( index.count / 3 ) : ( positionAttribute.count / 3 );
objects.push( {
object3d: object,
geometry: geometry
} );
}
} );
let output;
let offset = 80; // skip header
if ( binary === true ) {
const bufferLength = triangles * 2 + triangles * 3 * 4 * 4 + 80 + 4;
const arrayBuffer = new ArrayBuffer( bufferLength );
output = new DataView( arrayBuffer );
output.setUint32( offset, triangles, true ); offset += 4;
} else {
output = '';
output += 'solid exported\n';
}
const vA = new Vector3();
const vB = new Vector3();
const vC = new Vector3();
const cb = new Vector3();
const ab = new Vector3();
const normal = new Vector3();
for ( let i = 0, il = objects.length; i < il; i ++ ) {
const object = objects[ i ].object3d;
const geometry = objects[ i ].geometry;
const index = geometry.index;
const positionAttribute = geometry.getAttribute( 'position' );
if ( index !== null ) {
// indexed geometry
for ( let j = 0; j < index.count; j += 3 ) {
const a = index.getX( j + 0 );
const b = index.getX( j + 1 );
const c = index.getX( j + 2 );
writeFace( a, b, c, positionAttribute, object );
}
} else {
// non-indexed geometry
for ( let j = 0; j < positionAttribute.count; j += 3 ) {
const a = j + 0;
const b = j + 1;
const c = j + 2;
writeFace( a, b, c, positionAttribute, object );
}
}
}
if ( binary === false ) {
output += 'endsolid exported\n';
}
return output;
function writeFace( a, b, c, positionAttribute, object ) {
vA.fromBufferAttribute( positionAttribute, a );
vB.fromBufferAttribute( positionAttribute, b );
vC.fromBufferAttribute( positionAttribute, c );
if ( object.isSkinnedMesh === true ) {
object.applyBoneTransform( a, vA );
object.applyBoneTransform( b, vB );
object.applyBoneTransform( c, vC );
}
vA.applyMatrix4( object.matrixWorld );
vB.applyMatrix4( object.matrixWorld );
vC.applyMatrix4( object.matrixWorld );
writeNormal( vA, vB, vC );
writeVertex( vA );
writeVertex( vB );
writeVertex( vC );
if ( binary === true ) {
output.setUint16( offset, 0, true ); offset += 2;
} else {
output += '\t\tendloop\n';
output += '\tendfacet\n';
}
}
function writeNormal( vA, vB, vC ) {
cb.subVectors( vC, vB );
ab.subVectors( vA, vB );
cb.cross( ab ).normalize();
normal.copy( cb ).normalize();
if ( binary === true ) {
output.setFloat32( offset, normal.x, true ); offset += 4;
output.setFloat32( offset, normal.y, true ); offset += 4;
output.setFloat32( offset, normal.z, true ); offset += 4;
} else {
output += '\tfacet normal ' + normal.x + ' ' + normal.y + ' ' + normal.z + '\n';
output += '\t\touter loop\n';
}
}
function writeVertex( vertex ) {
if ( binary === true ) {
output.setFloat32( offset, vertex.x, true ); offset += 4;
output.setFloat32( offset, vertex.y, true ); offset += 4;
output.setFloat32( offset, vertex.z, true ); offset += 4;
} else {
output += '\t\t\tvertex ' + vertex.x + ' ' + vertex.y + ' ' + vertex.z + '\n';
}
}
}
}
/**
* Export options of `STLExporter`.
*
* @typedef {Object} STLExporter~Options
* @property {boolean} [binary=false] - Whether to export in binary format or ASCII.
**/
export { STLExporter };
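
A usage sketch for both modes with an illustrative scene; note that the binary path of the code above actually returns a `DataView` over the STL buffer, which `Blob` accepts directly.

```js
import * as THREE from 'three';
import { STLExporter } from 'three/addons/exporters/STLExporter.js';

const scene = new THREE.Scene();
scene.add( new THREE.Mesh( new THREE.BoxGeometry( 1, 1, 1 ), new THREE.MeshStandardMaterial() ) );
scene.updateMatrixWorld( true );

const exporter = new STLExporter();

// ASCII: a string beginning with 'solid exported'.
const asciiStl = exporter.parse( scene );

// Binary: 80-byte header, a uint32 triangle count, then 50 bytes per triangle.
const binaryStl = exporter.parse( scene, { binary: true } );

// Either result can be wrapped in a Blob for download.
const blob = new Blob( [ binaryStl ], { type: 'model/stl' } );
```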

node_modules/three/examples/jsm/exporters/USDZExporter.js (1228 lines, generated, vendored): file diff suppressed because it is too large.