- remove some useless console.log
    - remove useless files
This commit is contained in:
Kum1ta
2024-11-18 18:08:14 +01:00
parent 9f34d9b554
commit 17a0321532
342 changed files with 86 additions and 158195 deletions

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,89 @@
/**
 * ACES Filmic Tone Mapping Shader by Stephen Hill
 * source: https://github.com/selfshadow/ltc_code/blob/master/webgl/shaders/ltc/ltc_blit.fs
 *
 * This implementation of ACES is modified to accommodate a brighter viewing
 * environment. The 1/0.6 scale factor is subjective; see discussion in #19621.
 */

// Pass-through vertex shader: forwards the mesh UV to the fragment stage.
const acesVertexShader = /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`;

// Fragment shader: pre-exposes the input, applies the RRT+ODT fit in AP1
// space, converts back to sRGB primaries and clamps to [0, 1].
const acesFragmentShader = /* glsl */`
#define saturate(a) clamp( a, 0.0, 1.0 )
uniform sampler2D tDiffuse;
uniform float exposure;
varying vec2 vUv;
vec3 RRTAndODTFit( vec3 v ) {
vec3 a = v * ( v + 0.0245786 ) - 0.000090537;
vec3 b = v * ( 0.983729 * v + 0.4329510 ) + 0.238081;
return a / b;
}
vec3 ACESFilmicToneMapping( vec3 color ) {
// sRGB => XYZ => D65_2_D60 => AP1 => RRT_SAT
const mat3 ACESInputMat = mat3(
vec3( 0.59719, 0.07600, 0.02840 ), // transposed from source
vec3( 0.35458, 0.90834, 0.13383 ),
vec3( 0.04823, 0.01566, 0.83777 )
);
// ODT_SAT => XYZ => D60_2_D65 => sRGB
const mat3 ACESOutputMat = mat3(
vec3( 1.60475, -0.10208, -0.00327 ), // transposed from source
vec3( -0.53108, 1.10813, -0.07276 ),
vec3( -0.07367, -0.00605, 1.07602 )
);
color = ACESInputMat * color;
// Apply RRT and ODT
color = RRTAndODTFit( color );
color = ACESOutputMat * color;
// Clamp to [0, 1]
return saturate( color );
}
void main() {
vec4 tex = texture2D( tDiffuse, vUv );
tex.rgb *= exposure / 0.6; // pre-exposed, outside of the tone mapping function
gl_FragColor = vec4( ACESFilmicToneMapping( tex.rgb ), tex.a );
}`;

const ACESFilmicToneMappingShader = {

	name: 'ACESFilmicToneMappingShader',

	uniforms: {
		'tDiffuse': { value: null }, // input render-target texture
		'exposure': { value: 1.0 }   // linear exposure multiplier
	},

	vertexShader: acesVertexShader,
	fragmentShader: acesFragmentShader

};

export { ACESFilmicToneMappingShader };

View File

@ -0,0 +1,521 @@
import {
BufferGeometry,
Color,
FileLoader,
Float32BufferAttribute,
Group,
Loader,
Mesh,
MeshPhongMaterial
} from '/static/javascript/three/build/three.module.js';
import * as fflate from '../libs/fflate.module.js';
/**
 * AMFLoader: early release of an AMF (Additive Manufacturing File Format)
 * loader following the pattern of the example loaders in the three.js project.
 *
 * Usage:
 * const loader = new AMFLoader();
 * loader.load('/path/to/project.amf', function(objecttree) {
 * scene.add(objecttree);
 * });
 *
 * Materials now supported, material colors supported
 * Zip support, requires fflate
 * No constellation support (yet)!
 *
 */
class AMFLoader extends Loader {
// manager: optional THREE.LoadingManager forwarded to the base Loader.
constructor( manager ) {
super( manager );
}
/**
 * Fetches the file at `url` as an ArrayBuffer, parses it, and calls
 * onLoad(group). Parse errors are passed to onError when provided
 * (otherwise logged via console.error) and reported to the manager.
 */
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
// AMF data may be zip-compressed, so raw bytes are needed (not text).
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
 * Parses AMF data (plain XML or a zip archive containing an .amf entry)
 * from an ArrayBuffer and returns a THREE.Group tree: one child Group per
 * <object>, each containing one Mesh per <volume>.
 */
parse( data ) {
// Decode the buffer into an XML Document. A leading 'PK' magic number
// marks a zip archive; the first *.amf entry is then extracted via fflate.
// Returns null when no AMF document is found.
// NOTE(review): a null return makes loadDocumentScale() below throw on
// node.documentElement; the error surfaces through load()'s try/catch.
function loadDocument( data ) {
let view = new DataView( data );
const magic = String.fromCharCode( view.getUint8( 0 ), view.getUint8( 1 ) );
if ( magic === 'PK' ) {
let zip = null;
let file = null;
console.log( 'THREE.AMFLoader: Loading Zip' );
try {
zip = fflate.unzipSync( new Uint8Array( data ) );
} catch ( e ) {
if ( e instanceof ReferenceError ) {
console.log( 'THREE.AMFLoader: fflate missing and file is compressed.' );
return null;
}
}
// Pick the first archive entry whose name ends in '.amf'.
for ( file in zip ) {
if ( file.toLowerCase().slice( - 4 ) === '.amf' ) {
break;
}
}
console.log( 'THREE.AMFLoader: Trying to load file asset: ' + file );
view = new DataView( zip[ file ].buffer );
}
const fileText = new TextDecoder().decode( view );
const xmlData = new DOMParser().parseFromString( fileText, 'application/xml' );
if ( xmlData.documentElement.nodeName.toLowerCase() !== 'amf' ) {
console.log( 'THREE.AMFLoader: Error loading AMF - no AMF document found.' );
return null;
}
return xmlData;
}
// Reads the document's `unit` attribute and returns the multiplier that
// converts model units to millimeters (default unit: millimeter).
function loadDocumentScale( node ) {
let scale = 1.0;
let unit = 'millimeter';
if ( node.documentElement.attributes.unit !== undefined ) {
unit = node.documentElement.attributes.unit.value.toLowerCase();
}
const scaleUnits = {
millimeter: 1.0,
inch: 25.4,
feet: 304.8,
meter: 1000.0,
micron: 0.001
};
// Unknown units silently fall back to scale 1.0 (millimeter).
if ( scaleUnits[ unit ] !== undefined ) {
scale = scaleUnits[ unit ];
}
console.log( 'THREE.AMFLoader: Unit scale: ' + scale );
return scale;
}
// Builds a MeshPhongMaterial from a <material> element, reading its name
// (from <metadata type="name">) and <color>; returns { id, material }.
function loadMaterials( node ) {
let matName = 'AMF Material';
const matId = node.attributes.id.textContent;
let color = { r: 1.0, g: 1.0, b: 1.0, a: 1.0 };
let loadedMaterial = null;
for ( let i = 0; i < node.childNodes.length; i ++ ) {
const matChildEl = node.childNodes[ i ];
if ( matChildEl.nodeName === 'metadata' && matChildEl.attributes.type !== undefined ) {
if ( matChildEl.attributes.type.value === 'name' ) {
matName = matChildEl.textContent;
}
} else if ( matChildEl.nodeName === 'color' ) {
color = loadColor( matChildEl );
}
}
loadedMaterial = new MeshPhongMaterial( {
flatShading: true,
color: new Color( color.r, color.g, color.b ),
name: matName
} );
// Any non-opaque alpha enables transparency on the material.
if ( color.a !== 1.0 ) {
loadedMaterial.transparent = true;
loadedMaterial.opacity = color.a;
}
return { id: matId, material: loadedMaterial };
}
// Reads a <color> element's <r>/<g>/<b>/<a> children into an { r, g, b, a }
// record; missing channels default to 1.0.
// NOTE(review): textContent yields strings, not numbers - downstream
// Color()/opacity use rely on implicit coercion; confirm this is intended.
function loadColor( node ) {
const color = { r: 1.0, g: 1.0, b: 1.0, a: 1.0 };
for ( let i = 0; i < node.childNodes.length; i ++ ) {
const matColor = node.childNodes[ i ];
if ( matColor.nodeName === 'r' ) {
color.r = matColor.textContent;
} else if ( matColor.nodeName === 'g' ) {
color.g = matColor.textContent;
} else if ( matColor.nodeName === 'b' ) {
color.b = matColor.textContent;
} else if ( matColor.nodeName === 'a' ) {
color.a = matColor.textContent;
}
}
return color;
}
// Reads a <volume> element: its name, a flat triangle index list
// (v1, v2, v3 per <triangle>) and, when present, the material id.
// NOTE(review): the initializer uses lowercase `materialid` while the code
// assigns/reads camelCase `materialId` - the lowercase key is never used.
function loadMeshVolume( node ) {
const volume = { name: '', triangles: [], materialid: null };
let currVolumeNode = node.firstElementChild;
if ( node.attributes.materialid !== undefined ) {
volume.materialId = node.attributes.materialid.nodeValue;
}
while ( currVolumeNode ) {
if ( currVolumeNode.nodeName === 'metadata' ) {
if ( currVolumeNode.attributes.type !== undefined ) {
if ( currVolumeNode.attributes.type.value === 'name' ) {
volume.name = currVolumeNode.textContent;
}
}
} else if ( currVolumeNode.nodeName === 'triangle' ) {
const v1 = currVolumeNode.getElementsByTagName( 'v1' )[ 0 ].textContent;
const v2 = currVolumeNode.getElementsByTagName( 'v2' )[ 0 ].textContent;
const v3 = currVolumeNode.getElementsByTagName( 'v3' )[ 0 ].textContent;
volume.triangles.push( v1, v2, v3 );
}
currVolumeNode = currVolumeNode.nextElementSibling;
}
return volume;
}
// Reads a <vertices> element into flat arrays of coordinates and optional
// per-vertex normals: { vertices: [x,y,z,...], normals: [nx,ny,nz,...] }.
function loadMeshVertices( node ) {
const vertArray = [];
const normalArray = [];
let currVerticesNode = node.firstElementChild;
while ( currVerticesNode ) {
if ( currVerticesNode.nodeName === 'vertex' ) {
let vNode = currVerticesNode.firstElementChild;
while ( vNode ) {
if ( vNode.nodeName === 'coordinates' ) {
const x = vNode.getElementsByTagName( 'x' )[ 0 ].textContent;
const y = vNode.getElementsByTagName( 'y' )[ 0 ].textContent;
const z = vNode.getElementsByTagName( 'z' )[ 0 ].textContent;
vertArray.push( x, y, z );
} else if ( vNode.nodeName === 'normal' ) {
const nx = vNode.getElementsByTagName( 'nx' )[ 0 ].textContent;
const ny = vNode.getElementsByTagName( 'ny' )[ 0 ].textContent;
const nz = vNode.getElementsByTagName( 'nz' )[ 0 ].textContent;
normalArray.push( nx, ny, nz );
}
vNode = vNode.nextElementSibling;
}
}
currVerticesNode = currVerticesNode.nextElementSibling;
}
return { 'vertices': vertArray, 'normals': normalArray };
}
// Reads an <object> element: name, optional object-level color, and all of
// its meshes (vertices + normals + volumes); returns { id, obj }.
function loadObject( node ) {
const objId = node.attributes.id.textContent;
const loadedObject = { name: 'amfobject', meshes: [] };
let currColor = null;
let currObjNode = node.firstElementChild;
while ( currObjNode ) {
if ( currObjNode.nodeName === 'metadata' ) {
if ( currObjNode.attributes.type !== undefined ) {
if ( currObjNode.attributes.type.value === 'name' ) {
loadedObject.name = currObjNode.textContent;
}
}
} else if ( currObjNode.nodeName === 'color' ) {
// The color applies to meshes parsed after this point.
currColor = loadColor( currObjNode );
} else if ( currObjNode.nodeName === 'mesh' ) {
let currMeshNode = currObjNode.firstElementChild;
const mesh = { vertices: [], normals: [], volumes: [], color: currColor };
while ( currMeshNode ) {
if ( currMeshNode.nodeName === 'vertices' ) {
const loadedVertices = loadMeshVertices( currMeshNode );
mesh.normals = mesh.normals.concat( loadedVertices.normals );
mesh.vertices = mesh.vertices.concat( loadedVertices.vertices );
} else if ( currMeshNode.nodeName === 'volume' ) {
mesh.volumes.push( loadMeshVolume( currMeshNode ) );
}
currMeshNode = currMeshNode.nextElementSibling;
}
loadedObject.meshes.push( mesh );
}
currObjNode = currObjNode.nextElementSibling;
}
return { 'id': objId, 'obj': loadedObject };
}
// --- Top level of parse(): read document, collect materials and objects ---
const xmlData = loadDocument( data );
let amfName = '';
let amfAuthor = '';
const amfScale = loadDocumentScale( xmlData );
const amfMaterials = {};
const amfObjects = {};
const childNodes = xmlData.documentElement.childNodes;
let i, j;
for ( i = 0; i < childNodes.length; i ++ ) {
const child = childNodes[ i ];
if ( child.nodeName === 'metadata' ) {
if ( child.attributes.type !== undefined ) {
if ( child.attributes.type.value === 'name' ) {
amfName = child.textContent;
} else if ( child.attributes.type.value === 'author' ) {
amfAuthor = child.textContent;
}
}
} else if ( child.nodeName === 'material' ) {
const loadedMaterial = loadMaterials( child );
amfMaterials[ loadedMaterial.id ] = loadedMaterial.material;
} else if ( child.nodeName === 'object' ) {
const loadedObject = loadObject( child );
amfObjects[ loadedObject.id ] = loadedObject.obj;
}
}
// Assemble the scene graph: Group -> per-object Group -> per-volume Mesh.
const sceneObject = new Group();
const defaultMaterial = new MeshPhongMaterial( {
name: Loader.DEFAULT_MATERIAL_NAME,
color: 0xaaaaff,
flatShading: true
} );
sceneObject.name = amfName;
sceneObject.userData.author = amfAuthor;
sceneObject.userData.loader = 'AMF';
for ( const id in amfObjects ) {
const part = amfObjects[ id ];
const meshes = part.meshes;
const newObject = new Group();
newObject.name = part.name || '';
for ( i = 0; i < meshes.length; i ++ ) {
// A mesh-level color overrides the default material (cloned per mesh).
let objDefaultMaterial = defaultMaterial;
const mesh = meshes[ i ];
const vertices = new Float32BufferAttribute( mesh.vertices, 3 );
let normals = null;
if ( mesh.normals.length ) {
normals = new Float32BufferAttribute( mesh.normals, 3 );
}
if ( mesh.color ) {
const color = mesh.color;
objDefaultMaterial = defaultMaterial.clone();
objDefaultMaterial.color = new Color( color.r, color.g, color.b );
if ( color.a !== 1.0 ) {
objDefaultMaterial.transparent = true;
objDefaultMaterial.opacity = color.a;
}
}
const volumes = mesh.volumes;
for ( j = 0; j < volumes.length; j ++ ) {
const volume = volumes[ j ];
const newGeometry = new BufferGeometry();
// A volume material (looked up by id) wins over the mesh/default one.
let material = objDefaultMaterial;
newGeometry.setIndex( volume.triangles );
newGeometry.setAttribute( 'position', vertices.clone() );
if ( normals ) {
newGeometry.setAttribute( 'normal', normals.clone() );
}
if ( amfMaterials[ volume.materialId ] !== undefined ) {
material = amfMaterials[ volume.materialId ];
}
// Apply the document unit scale directly to the geometry.
newGeometry.scale( amfScale, amfScale, amfScale );
newObject.add( new Mesh( newGeometry, material.clone() ) );
}
}
sceneObject.add( newObject );
}
return sceneObject;
}
}
export { AMFLoader };

View File

@ -0,0 +1,232 @@
/**
 * ARButton: builds a DOM element that starts/stops an 'immersive-ar'
 * WebXR session on the given renderer.
 *
 * Returns a <button> when WebXR is present (enabled once support is
 * confirmed), or an <a> explaining why AR is unavailable otherwise.
 */
class ARButton {

	static createButton( renderer, sessionInit = {} ) {

		const button = document.createElement( 'button' );

		// Shared look for both the real button and the fallback link.
		function stylize( element ) {

			Object.assign( element.style, {
				position: 'absolute',
				bottom: '20px',
				padding: '12px 6px',
				border: '1px solid #fff',
				borderRadius: '4px',
				background: 'rgba(0,0,0,0.1)',
				color: '#fff',
				font: 'normal 13px sans-serif',
				textAlign: 'center',
				opacity: '0.5',
				outline: 'none',
				zIndex: '999'
			} );

		}

		// Wires up the button once AR support is confirmed.
		function wireStartAR( /*device*/ ) {

			// Provide a default DOM overlay (with a close "X") when the caller
			// did not supply one.
			if ( sessionInit.domOverlay === undefined ) {

				const overlay = document.createElement( 'div' );
				overlay.style.display = 'none';
				document.body.appendChild( overlay );

				const svg = document.createElementNS( 'http://www.w3.org/2000/svg', 'svg' );
				svg.setAttribute( 'width', 38 );
				svg.setAttribute( 'height', 38 );
				svg.style.position = 'absolute';
				svg.style.right = '20px';
				svg.style.top = '20px';
				svg.addEventListener( 'click', function () {

					activeSession.end();

				} );
				overlay.appendChild( svg );

				const path = document.createElementNS( 'http://www.w3.org/2000/svg', 'path' );
				path.setAttribute( 'd', 'M 12,12 L 28,28 M 28,12 12,28' );
				path.setAttribute( 'stroke', '#fff' );
				path.setAttribute( 'stroke-width', 2 );
				svg.appendChild( path );

				if ( sessionInit.optionalFeatures === undefined ) {

					sessionInit.optionalFeatures = [];

				}

				sessionInit.optionalFeatures.push( 'dom-overlay' );
				sessionInit.domOverlay = { root: overlay };

			}

			let activeSession = null;

			async function handleSessionStarted( session ) {

				session.addEventListener( 'end', handleSessionEnded );
				renderer.xr.setReferenceSpaceType( 'local' );
				await renderer.xr.setSession( session );
				button.textContent = 'STOP AR';
				sessionInit.domOverlay.root.style.display = '';
				activeSession = session;

			}

			function handleSessionEnded( /*event*/ ) {

				activeSession.removeEventListener( 'end', handleSessionEnded );
				button.textContent = 'START AR';
				sessionInit.domOverlay.root.style.display = 'none';
				activeSession = null;

			}

			Object.assign( button.style, {
				display: '',
				cursor: 'pointer',
				left: 'calc(50% - 50px)',
				width: '100px'
			} );
			button.textContent = 'START AR';

			button.onmouseenter = () => {

				button.style.opacity = '1.0';

			};

			button.onmouseleave = () => {

				button.style.opacity = '0.5';

			};

			button.onclick = () => {

				if ( activeSession === null ) {

					navigator.xr.requestSession( 'immersive-ar', sessionInit ).then( handleSessionStarted );

				} else {

					activeSession.end();

					// Re-offer the session so the browser UI can restart AR.
					if ( navigator.xr.offerSession !== undefined ) {

						navigator.xr.offerSession( 'immersive-ar', sessionInit )
							.then( handleSessionStarted )
							.catch( ( err ) => {

								console.warn( err );

							} );

					}

				}

			};

			if ( navigator.xr.offerSession !== undefined ) {

				navigator.xr.offerSession( 'immersive-ar', sessionInit )
					.then( handleSessionStarted )
					.catch( ( err ) => {

						console.warn( err );

					} );

			}

		}

		// Strips interactivity so the button can display a status message.
		function neutralizeButton() {

			Object.assign( button.style, {
				display: '',
				cursor: 'auto',
				left: 'calc(50% - 75px)',
				width: '150px'
			} );
			button.onmouseenter = null;
			button.onmouseleave = null;
			button.onclick = null;

		}

		function markUnsupported() {

			neutralizeButton();
			button.textContent = 'AR NOT SUPPORTED';

		}

		function markNotAllowed( exception ) {

			neutralizeButton();
			console.warn( 'Exception when trying to call xr.isSessionSupported', exception );
			button.textContent = 'AR NOT ALLOWED';

		}

		if ( 'xr' in navigator ) {

			button.id = 'ARButton';
			button.style.display = 'none';
			stylize( button );

			navigator.xr.isSessionSupported( 'immersive-ar' ).then( function ( supported ) {

				if ( supported ) {

					wireStartAR();

				} else {

					markUnsupported();

				}

			} ).catch( markNotAllowed );

			return button;

		}

		// WebXR unavailable: return an informative link instead of a button.
		const message = document.createElement( 'a' );

		if ( window.isSecureContext === false ) {

			message.href = document.location.href.replace( /^http:/, 'https:' );
			message.innerHTML = 'WEBXR NEEDS HTTPS'; // TODO Improve message

		} else {

			message.href = 'https://immersiveweb.dev/';
			message.innerHTML = 'WEBXR NOT AVAILABLE';

		}

		message.style.left = 'calc(50% - 90px)';
		message.style.width = '180px';
		message.style.textDecoration = 'none';
		stylize( message );

		return message;

	}

}

export { ARButton };

View File

@ -0,0 +1,296 @@
export * from './animation/AnimationClipCreator.js';
export * from './animation/CCDIKSolver.js';
export * from './animation/MMDAnimationHelper.js';
export * from './animation/MMDPhysics.js';
export * from './cameras/CinematicCamera.js';
export { default as WebGL } from './capabilities/WebGL.js';
export * from './controls/ArcballControls.js';
export * from './controls/DragControls.js';
export * from './controls/FirstPersonControls.js';
export * from './controls/FlyControls.js';
export * from './controls/MapControls.js';
export * from './controls/OrbitControls.js';
export * from './controls/PointerLockControls.js';
export * from './controls/TrackballControls.js';
export * from './controls/TransformControls.js';
export * from './csm/CSM.js';
export * from './csm/CSMFrustum.js';
export * from './csm/CSMHelper.js';
export * from './csm/CSMShader.js';
export * as Curves from './curves/CurveExtras.js';
export * from './curves/NURBSCurve.js';
export * from './curves/NURBSSurface.js';
export * from './curves/NURBSVolume.js';
export * as NURBSUtils from './curves/NURBSUtils.js';
export * from './effects/AnaglyphEffect.js';
export * from './effects/AsciiEffect.js';
export * from './effects/OutlineEffect.js';
export * from './effects/ParallaxBarrierEffect.js';
export * from './effects/PeppersGhostEffect.js';
export * from './effects/StereoEffect.js';
export * from './environments/DebugEnvironment.js';
export * from './environments/RoomEnvironment.js';
export * from './exporters/DRACOExporter.js';
export * from './exporters/EXRExporter.js';
export * from './exporters/GLTFExporter.js';
export * from './exporters/KTX2Exporter.js';
export * from './exporters/MMDExporter.js';
export * from './exporters/OBJExporter.js';
export * from './exporters/PLYExporter.js';
export * from './exporters/STLExporter.js';
export * from './exporters/USDZExporter.js';
export * from './geometries/BoxLineGeometry.js';
export * from './geometries/ConvexGeometry.js';
export * from './geometries/DecalGeometry.js';
export * from './geometries/ParametricGeometries.js';
export * from './geometries/ParametricGeometry.js';
export * from './geometries/RoundedBoxGeometry.js';
export * from './geometries/TeapotGeometry.js';
export * from './geometries/TextGeometry.js';
export * from './helpers/LightProbeHelper.js';
export * from './helpers/OctreeHelper.js';
export * from './helpers/PositionalAudioHelper.js';
export * from './helpers/RectAreaLightHelper.js';
export * from './helpers/TextureHelper.js';
export * from './helpers/VertexNormalsHelper.js';
export * from './helpers/VertexTangentsHelper.js';
export * from './helpers/ViewHelper.js';
export * from './interactive/HTMLMesh.js';
export * from './interactive/InteractiveGroup.js';
export * from './interactive/SelectionBox.js';
export * from './interactive/SelectionHelper.js';
export * from './lights/LightProbeGenerator.js';
export * from './lights/RectAreaLightTexturesLib.js';
export * from './lights/RectAreaLightUniformsLib.js';
export * from './lines/Line2.js';
export * from './lines/LineGeometry.js';
export * from './lines/LineMaterial.js';
export * from './lines/LineSegments2.js';
export * from './lines/LineSegmentsGeometry.js';
export * from './lines/Wireframe.js';
export * from './lines/WireframeGeometry2.js';
export * from './loaders/3DMLoader.js';
export * from './loaders/3MFLoader.js';
export * from './loaders/AMFLoader.js';
export * from './loaders/BVHLoader.js';
export * from './loaders/ColladaLoader.js';
export * from './loaders/DDSLoader.js';
export * from './loaders/DRACOLoader.js';
export * from './loaders/EXRLoader.js';
export * from './loaders/FBXLoader.js';
export * from './loaders/FontLoader.js';
export * from './loaders/GCodeLoader.js';
export * from './loaders/GLTFLoader.js';
export * from './loaders/HDRCubeTextureLoader.js';
export * from './loaders/IESLoader.js';
export * from './loaders/KMZLoader.js';
export * from './loaders/KTX2Loader.js';
export * from './loaders/KTXLoader.js';
export * from './loaders/LDrawLoader.js';
export * from './loaders/LUT3dlLoader.js';
export * from './loaders/LUTCubeLoader.js';
export * from './loaders/LWOLoader.js';
export * from './loaders/LogLuvLoader.js';
export * from './loaders/LottieLoader.js';
export * from './loaders/MD2Loader.js';
export * from './loaders/MDDLoader.js';
export * from './loaders/MMDLoader.js';
export * from './loaders/MTLLoader.js';
export * from './loaders/NRRDLoader.js';
export * from './loaders/OBJLoader.js';
export * from './loaders/PCDLoader.js';
export * from './loaders/PDBLoader.js';
export * from './loaders/PLYLoader.js';
export * from './loaders/PVRLoader.js';
export * from './loaders/RGBELoader.js';
export * from './loaders/UltraHDRLoader.js';
export * from './loaders/RGBMLoader.js';
export * from './loaders/STLLoader.js';
export * from './loaders/SVGLoader.js';
export * from './loaders/TDSLoader.js';
export * from './loaders/TGALoader.js';
export * from './loaders/TIFFLoader.js';
export * from './loaders/TTFLoader.js';
export * from './loaders/TiltLoader.js';
export * from './loaders/USDZLoader.js';
export * from './loaders/VOXLoader.js';
export * from './loaders/VRMLLoader.js';
export * from './loaders/VTKLoader.js';
export * from './loaders/XYZLoader.js';
export * from './materials/MeshGouraudMaterial.js';
export * from './math/Capsule.js';
export * from './math/ColorConverter.js';
export * from './math/ConvexHull.js';
export * from './math/ImprovedNoise.js';
export * from './math/Lut.js';
export * from './math/MeshSurfaceSampler.js';
export * from './math/OBB.js';
export * from './math/Octree.js';
export * from './math/SimplexNoise.js';
export * from './misc/ConvexObjectBreaker.js';
export * from './misc/GPUComputationRenderer.js';
export * from './misc/Gyroscope.js';
export * from './misc/MD2Character.js';
export * from './misc/MD2CharacterComplex.js';
export * from './misc/MorphAnimMesh.js';
export * from './misc/MorphBlendMesh.js';
export * from './misc/ProgressiveLightMap.js';
export * from './misc/RollerCoaster.js';
export * from './misc/Timer.js';
export * from './misc/TubePainter.js';
export * from './misc/Volume.js';
export * from './misc/VolumeSlice.js';
export * from './modifiers/CurveModifier.js';
export * from './modifiers/EdgeSplitModifier.js';
export * from './modifiers/SimplifyModifier.js';
export * from './modifiers/TessellateModifier.js';
export * from './objects/GroundedSkybox.js';
export * from './objects/Lensflare.js';
export * from './objects/MarchingCubes.js';
export * from './objects/Reflector.js';
export * from './objects/ReflectorForSSRPass.js';
export * from './objects/Refractor.js';
export * from './objects/ShadowMesh.js';
export * from './objects/Sky.js';
export * from './objects/Water.js';
export { Water as Water2 } from './objects/Water2.js';
export * from './physics/AmmoPhysics.js';
export * from './physics/RapierPhysics.js';
export * from './postprocessing/AfterimagePass.js';
export * from './postprocessing/BloomPass.js';
export * from './postprocessing/BokehPass.js';
export * from './postprocessing/ClearPass.js';
export * from './postprocessing/CubeTexturePass.js';
export * from './postprocessing/DotScreenPass.js';
export * from './postprocessing/EffectComposer.js';
export * from './postprocessing/FilmPass.js';
export * from './postprocessing/GlitchPass.js';
export * from './postprocessing/GTAOPass.js';
export * from './postprocessing/HalftonePass.js';
export * from './postprocessing/LUTPass.js';
export * from './postprocessing/MaskPass.js';
export * from './postprocessing/OutlinePass.js';
export * from './postprocessing/OutputPass.js';
export * from './postprocessing/Pass.js';
export * from './postprocessing/RenderPass.js';
export * from './postprocessing/RenderPixelatedPass.js';
export * from './postprocessing/SAOPass.js';
export * from './postprocessing/SMAAPass.js';
export * from './postprocessing/SSAARenderPass.js';
export * from './postprocessing/SSAOPass.js';
export * from './postprocessing/SSRPass.js';
export * from './postprocessing/SavePass.js';
export * from './postprocessing/ShaderPass.js';
export * from './postprocessing/TAARenderPass.js';
export * from './postprocessing/TexturePass.js';
export * from './postprocessing/UnrealBloomPass.js';
export * from './renderers/CSS2DRenderer.js';
export * from './renderers/CSS3DRenderer.js';
export * from './renderers/Projector.js';
export * from './renderers/SVGRenderer.js';
export * from './shaders/ACESFilmicToneMappingShader.js';
export * from './shaders/AfterimageShader.js';
export * from './shaders/BasicShader.js';
export * from './shaders/BleachBypassShader.js';
export * from './shaders/BlendShader.js';
export * from './shaders/BokehShader.js';
export { BokehShader as BokehShader2 } from './shaders/BokehShader2.js';
export * from './shaders/BrightnessContrastShader.js';
export * from './shaders/ColorCorrectionShader.js';
export * from './shaders/ColorifyShader.js';
export * from './shaders/ConvolutionShader.js';
export * from './shaders/CopyShader.js';
export * from './shaders/DOFMipMapShader.js';
export * from './shaders/DepthLimitedBlurShader.js';
export * from './shaders/DigitalGlitch.js';
export * from './shaders/DotScreenShader.js';
export * from './shaders/ExposureShader.js';
export * from './shaders/FXAAShader.js';
export * from './shaders/FilmShader.js';
export * from './shaders/FocusShader.js';
export * from './shaders/FreiChenShader.js';
export * from './shaders/GammaCorrectionShader.js';
export * from './shaders/GodRaysShader.js';
export * from './shaders/GTAOShader.js';
export * from './shaders/HalftoneShader.js';
export * from './shaders/HorizontalBlurShader.js';
export * from './shaders/HorizontalTiltShiftShader.js';
export * from './shaders/HueSaturationShader.js';
export * from './shaders/KaleidoShader.js';
export * from './shaders/LuminosityHighPassShader.js';
export * from './shaders/LuminosityShader.js';
export * from './shaders/MMDToonShader.js';
export * from './shaders/MirrorShader.js';
export * from './shaders/NormalMapShader.js';
export * from './shaders/OutputShader.js';
export * from './shaders/RGBShiftShader.js';
export * from './shaders/SAOShader.js';
export * from './shaders/SMAAShader.js';
export * from './shaders/SSAOShader.js';
export * from './shaders/SSRShader.js';
export * from './shaders/SepiaShader.js';
export * from './shaders/SobelOperatorShader.js';
export * from './shaders/SubsurfaceScatteringShader.js';
export * from './shaders/TechnicolorShader.js';
export * from './shaders/ToonShader.js';
export * from './shaders/TriangleBlurShader.js';
export * from './shaders/UnpackDepthRGBAShader.js';
export * from './shaders/VelocityShader.js';
export * from './shaders/VerticalBlurShader.js';
export * from './shaders/VerticalTiltShiftShader.js';
export * from './shaders/VignetteShader.js';
export * from './shaders/VolumeShader.js';
export * from './shaders/WaterRefractionShader.js';
export * from './textures/FlakesTexture.js';
export * as BufferGeometryUtils from './utils/BufferGeometryUtils.js';
export * as CameraUtils from './utils/CameraUtils.js';
export * from './utils/GPUStatsPanel.js';
export * as GeometryCompressionUtils from './utils/GeometryCompressionUtils.js';
export * as GeometryUtils from './utils/GeometryUtils.js';
export * from './utils/LDrawUtils.js';
export * from './utils/PackedPhongMaterial.js';
export * as SceneUtils from './utils/SceneUtils.js';
export * from './utils/ShadowMapViewer.js';
export * as SkeletonUtils from './utils/SkeletonUtils.js';
export * as SortUtils from './utils/SortUtils.js';
export * from './utils/TextureUtils.js';
export * from './utils/UVsDebug.js';
export * from './utils/WorkerPool.js';
export * from './webxr/ARButton.js';
export * from './webxr/OculusHandModel.js';
export * from './webxr/OculusHandPointerModel.js';
export * from './webxr/Text2D.js';
export * from './webxr/VRButton.js';
export * from './webxr/XRButton.js';
export * from './webxr/XRControllerModelFactory.js';
export * from './webxr/XREstimatedLight.js';
export * from './webxr/XRHandMeshModel.js';
export * from './webxr/XRHandModelFactory.js';
export * from './webxr/XRHandPrimitiveModel.js';
export * from './webxr/XRPlanes.js';

View File

@ -0,0 +1,104 @@
import {
HalfFloatType,
MeshBasicMaterial,
NearestFilter,
ShaderMaterial,
UniformsUtils,
WebGLRenderTarget
} from '/static/javascript/three/build/three.module.js';
import { Pass, FullScreenQuad } from'/static/javascript/three/examples/jsm/postprocessing/Pass.js';
import { AfterimageShader } from'/static/javascript/three/examples/jsm/shaders/AfterimageShader.js';
/**
 * AfterimagePass: post-processing pass that blends each new frame with a
 * decayed copy of the previous composite, producing a motion-trail
 * ("afterimage") effect.
 */
class AfterimagePass extends Pass {

	/**
	 * @param {number} damp - Decay factor; values closer to 1 keep trails longer.
	 */
	constructor( damp = 0.96 ) {

		super();

		this.shader = AfterimageShader;
		this.uniforms = UniformsUtils.clone( this.shader.uniforms );
		this.uniforms[ 'damp' ].value = damp;

		// Ping-pong targets: "comp" receives the blended result each frame,
		// "old" holds the previous frame's composite.
		const targetOptions = {
			magFilter: NearestFilter,
			type: HalfFloatType
		};
		this.textureComp = new WebGLRenderTarget( window.innerWidth, window.innerHeight, targetOptions );
		this.textureOld = new WebGLRenderTarget( window.innerWidth, window.innerHeight, targetOptions );

		this.compFsMaterial = new ShaderMaterial( {
			uniforms: this.uniforms,
			vertexShader: this.shader.vertexShader,
			fragmentShader: this.shader.fragmentShader
		} );
		this.compFsQuad = new FullScreenQuad( this.compFsMaterial );

		this.copyFsMaterial = new MeshBasicMaterial();
		this.copyFsQuad = new FullScreenQuad( this.copyFsMaterial );

	}

	render( renderer, writeBuffer, readBuffer/*, deltaTime, maskActive*/ ) {

		// Blend the incoming frame with the damped previous composite.
		this.uniforms[ 'tOld' ].value = this.textureOld.texture;
		this.uniforms[ 'tNew' ].value = readBuffer.texture;

		renderer.setRenderTarget( this.textureComp );
		this.compFsQuad.render( renderer );

		// Copy the composite to the destination (screen or writeBuffer).
		this.copyFsQuad.material.map = this.textureComp.texture;

		if ( this.renderToScreen ) {

			renderer.setRenderTarget( null );
			this.copyFsQuad.render( renderer );

		} else {

			renderer.setRenderTarget( writeBuffer );
			if ( this.clear ) renderer.clear();
			this.copyFsQuad.render( renderer );

		}

		// Swap buffers so textureOld holds the latest image for the next frame.
		[ this.textureOld, this.textureComp ] = [ this.textureComp, this.textureOld ];

	}

	setSize( width, height ) {

		this.textureComp.setSize( width, height );
		this.textureOld.setSize( width, height );

	}

	dispose() {

		this.textureComp.dispose();
		this.textureOld.dispose();
		this.compFsMaterial.dispose();
		this.copyFsMaterial.dispose();
		this.compFsQuad.dispose();
		this.copyFsQuad.dispose();

	}

}

export { AfterimagePass };

View File

@ -0,0 +1,58 @@
/**
 * Afterimage shader
 * Blends the current frame with a damped copy of the previous composite;
 * pixels below a small threshold are cut off so trails eventually vanish.
 * Inspired by a demo on codepen:
 * https://codepen.io/brunoimbrizi/pen/MoRJaN?page=1&
 */

// Pass-through vertex shader: forwards the mesh UV to the fragment stage.
const afterimageVertexShader = /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`;

// Fragment shader: per-channel max of the new frame and the damped old frame.
const afterimageFragmentShader = /* glsl */`
uniform float damp;
uniform sampler2D tOld;
uniform sampler2D tNew;
varying vec2 vUv;
vec4 when_gt( vec4 x, float y ) {
return max( sign( x - y ), 0.0 );
}
void main() {
vec4 texelOld = texture2D( tOld, vUv );
vec4 texelNew = texture2D( tNew, vUv );
texelOld *= damp * when_gt( texelOld, 0.1 );
gl_FragColor = max(texelNew, texelOld);
}`;

const AfterimageShader = {

	name: 'AfterimageShader',

	uniforms: {
		'damp': { value: 0.96 }, // decay multiplier applied to the old frame
		'tOld': { value: null }, // previous composited frame
		'tNew': { value: null }  // current incoming frame
	},

	vertexShader: afterimageVertexShader,
	fragmentShader: afterimageFragmentShader

};

export { AfterimageShader };

View File

@ -0,0 +1,437 @@
import {
AnimationClip,
Bone,
FileLoader,
Loader,
Quaternion,
QuaternionKeyframeTrack,
Skeleton,
Vector3,
VectorKeyframeTrack
} from '/static/javascript/three/build/three.module.js';
/**
* Description: reads BVH files and outputs a single Skeleton and an AnimationClip
*
* Currently only supports bvh files containing a single root.
*
*/
// Loader for Biovision Hierarchy (.bvh) motion-capture files.
// parse() returns { skeleton: Skeleton, clip: AnimationClip }.
// Only files containing a single root joint are supported.
class BVHLoader extends Loader {
constructor( manager ) {
super( manager );
// When set to false, the corresponding keyframe tracks are omitted from the clip.
this.animateBonePositions = true;
this.animateBoneRotations = true;
}
// Fetches `url` as text and forwards it to parse(). Parse errors go to
// onError (or console.error) and are reported to the LoadingManager.
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
// Parses BVH text into { skeleton, clip }.
parse( text ) {
/*
reads a string array (lines) from a BVH file
and outputs a skeleton structure including motion data
returns the flat list of all parsed nodes; each node is
{ name: '', channels: [], children: [] }
*/
function readBvh( lines ) {
// read model structure
if ( nextLine( lines ) !== 'HIERARCHY' ) {
console.error( 'THREE.BVHLoader: HIERARCHY expected.' );
}
const list = []; // collects flat array of all bones
const root = readNode( lines, nextLine( lines ), list );
// read motion data
if ( nextLine( lines ) !== 'MOTION' ) {
console.error( 'THREE.BVHLoader: MOTION expected.' );
}
// number of frames
let tokens = nextLine( lines ).split( /[\s]+/ );
const numFrames = parseInt( tokens[ 1 ] );
if ( isNaN( numFrames ) ) {
console.error( 'THREE.BVHLoader: Failed to read number of frames.' );
}
// frame time
tokens = nextLine( lines ).split( /[\s]+/ );
const frameTime = parseFloat( tokens[ 2 ] );
if ( isNaN( frameTime ) ) {
console.error( 'THREE.BVHLoader: Failed to read frame time.' );
}
// read frame data line by line
for ( let i = 0; i < numFrames; i ++ ) {
tokens = nextLine( lines ).split( /[\s]+/ );
readFrameData( tokens, i * frameTime, root );
}
return list;
}
/*
Recursively reads data from a single frame into the bone hierarchy.
The passed bone hierarchy has to be structured in the same order as the BVH file.
keyframe data is stored in bone.frames.
- data: splitted string array (frame values), values are shift()ed so
this should be empty after parsing the whole hierarchy.
- frameTime: playback time for this keyframe.
- bone: the bone to read frame data from.
*/
function readFrameData( data, frameTime, bone ) {
// end sites have no motion data
if ( bone.type === 'ENDSITE' ) return;
// add keyframe
const keyframe = {
time: frameTime,
position: new Vector3(),
rotation: new Quaternion()
};
bone.frames.push( keyframe );
const quat = new Quaternion();
const vx = new Vector3( 1, 0, 0 );
const vy = new Vector3( 0, 1, 0 );
const vz = new Vector3( 0, 0, 1 );
// parse values for each channel in node
// Rotation channels are composed in file order (quaternion multiply),
// with degrees converted to radians.
for ( let i = 0; i < bone.channels.length; i ++ ) {
switch ( bone.channels[ i ] ) {
case 'Xposition':
keyframe.position.x = parseFloat( data.shift().trim() );
break;
case 'Yposition':
keyframe.position.y = parseFloat( data.shift().trim() );
break;
case 'Zposition':
keyframe.position.z = parseFloat( data.shift().trim() );
break;
case 'Xrotation':
quat.setFromAxisAngle( vx, parseFloat( data.shift().trim() ) * Math.PI / 180 );
keyframe.rotation.multiply( quat );
break;
case 'Yrotation':
quat.setFromAxisAngle( vy, parseFloat( data.shift().trim() ) * Math.PI / 180 );
keyframe.rotation.multiply( quat );
break;
case 'Zrotation':
quat.setFromAxisAngle( vz, parseFloat( data.shift().trim() ) * Math.PI / 180 );
keyframe.rotation.multiply( quat );
break;
default:
console.warn( 'THREE.BVHLoader: Invalid channel type.' );
}
}
// parse child nodes
for ( let i = 0; i < bone.children.length; i ++ ) {
readFrameData( data, frameTime, bone.children[ i ] );
}
}
/*
Recursively parses the HIERARCHY section of the BVH file
- lines: all lines of the file. lines are consumed as we go along.
- firstline: line containing the node type and name e.g. 'JOINT hip'
- list: collects a flat list of nodes
returns: a BVH node including children
*/
function readNode( lines, firstline, list ) {
const node = { name: '', type: '', frames: [] };
list.push( node );
// parse node type and name
let tokens = firstline.split( /[\s]+/ );
if ( tokens[ 0 ].toUpperCase() === 'END' && tokens[ 1 ].toUpperCase() === 'SITE' ) {
node.type = 'ENDSITE';
node.name = 'ENDSITE'; // bvh end sites have no name
} else {
node.name = tokens[ 1 ];
node.type = tokens[ 0 ].toUpperCase();
}
if ( nextLine( lines ) !== '{' ) {
console.error( 'THREE.BVHLoader: Expected opening { after type & name' );
}
// parse OFFSET
tokens = nextLine( lines ).split( /[\s]+/ );
if ( tokens[ 0 ] !== 'OFFSET' ) {
console.error( 'THREE.BVHLoader: Expected OFFSET but got: ' + tokens[ 0 ] );
}
if ( tokens.length !== 4 ) {
console.error( 'THREE.BVHLoader: Invalid number of values for OFFSET.' );
}
const offset = new Vector3(
parseFloat( tokens[ 1 ] ),
parseFloat( tokens[ 2 ] ),
parseFloat( tokens[ 3 ] )
);
if ( isNaN( offset.x ) || isNaN( offset.y ) || isNaN( offset.z ) ) {
console.error( 'THREE.BVHLoader: Invalid values of OFFSET.' );
}
node.offset = offset;
// parse CHANNELS definitions
// Note: ENDSITE nodes get neither channels nor a children array.
if ( node.type !== 'ENDSITE' ) {
tokens = nextLine( lines ).split( /[\s]+/ );
if ( tokens[ 0 ] !== 'CHANNELS' ) {
console.error( 'THREE.BVHLoader: Expected CHANNELS definition.' );
}
const numChannels = parseInt( tokens[ 1 ] );
node.channels = tokens.splice( 2, numChannels );
node.children = [];
}
// read children
while ( true ) {
const line = nextLine( lines );
if ( line === '}' ) {
return node;
} else {
node.children.push( readNode( lines, line, list ) );
}
}
}
/*
recursively converts the internal bvh node structure to a Bone hierarchy
source: the bvh root node
list: pass an empty array, collects a flat list of all converted THREE.Bones
returns the root Bone
*/
function toTHREEBone( source, list ) {
const bone = new Bone();
list.push( bone );
bone.position.add( source.offset );
bone.name = source.name;
if ( source.type !== 'ENDSITE' ) {
for ( let i = 0; i < source.children.length; i ++ ) {
bone.add( toTHREEBone( source.children[ i ], list ) );
}
}
return bone;
}
/*
builds a AnimationClip from the keyframe data saved in each bone.
bone: bvh root node
returns: a AnimationClip containing position and quaternion tracks
*/
function toTHREEAnimation( bones ) {
const tracks = [];
// create a position and quaternion animation track for each node
for ( let i = 0; i < bones.length; i ++ ) {
const bone = bones[ i ];
if ( bone.type === 'ENDSITE' )
continue;
// track data
const times = [];
const positions = [];
const rotations = [];
for ( let j = 0; j < bone.frames.length; j ++ ) {
const frame = bone.frames[ j ];
times.push( frame.time );
// the animation system animates the position property,
// so we have to add the joint offset to all values
positions.push( frame.position.x + bone.offset.x );
positions.push( frame.position.y + bone.offset.y );
positions.push( frame.position.z + bone.offset.z );
rotations.push( frame.rotation.x );
rotations.push( frame.rotation.y );
rotations.push( frame.rotation.z );
rotations.push( frame.rotation.w );
}
if ( scope.animateBonePositions ) {
tracks.push( new VectorKeyframeTrack( bone.name + '.position', times, positions ) );
}
if ( scope.animateBoneRotations ) {
tracks.push( new QuaternionKeyframeTrack( bone.name + '.quaternion', times, rotations ) );
}
}
// -1 duration: clip length is derived from the tracks.
return new AnimationClip( 'animation', - 1, tracks );
}
/*
returns the next non-empty line in lines
NOTE(review): throws if the input runs out of lines (shift() -> undefined).
*/
function nextLine( lines ) {
let line;
// skip empty lines
while ( ( line = lines.shift().trim() ).length === 0 ) { }
return line;
}
const scope = this;
const lines = text.split( /[\r\n]+/g );
const bones = readBvh( lines );
const threeBones = [];
toTHREEBone( bones[ 0 ], threeBones );
const threeClip = toTHREEAnimation( bones );
return {
skeleton: new Skeleton( threeBones ),
clip: threeClip
};
}
}
export { BVHLoader };

View File

@ -0,0 +1,29 @@
/**
* Simple test shader
*/
// Minimal smoke-test shader: no uniforms, draws every fragment as
// half-transparent red. Useful to verify a post-processing chain renders.
const BasicShader = {
name: 'BasicShader',
uniforms: {},
vertexShader: /* glsl */`
void main() {
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
void main() {
gl_FragColor = vec4( 1.0, 0.0, 0.0, 0.5 );
}`
};
export { BasicShader };

View File

@ -0,0 +1,62 @@
/**
* Bleach bypass shader [http://en.wikipedia.org/wiki/Bleach_bypass]
* - based on Nvidia example
* http://developer.download.nvidia.com/shaderlibrary/webpages/shader_library.html#post_bleach_bypass
*/
// Bleach-bypass film look: overlays the image with its own luminance
// (overlay-style blend selected by L) and mixes the result with the
// original by `opacity` scaled with the source alpha.
const BleachBypassShader = {
name: 'BleachBypassShader',
uniforms: {
'tDiffuse': { value: null }, // input frame
'opacity': { value: 1.0 } // effect strength in [0, 1]; 0 = unchanged input
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform float opacity;
uniform sampler2D tDiffuse;
varying vec2 vUv;
void main() {
vec4 base = texture2D( tDiffuse, vUv );
vec3 lumCoeff = vec3( 0.25, 0.65, 0.1 );
float lum = dot( lumCoeff, base.rgb );
vec3 blend = vec3( lum );
float L = min( 1.0, max( 0.0, 10.0 * ( lum - 0.45 ) ) );
vec3 result1 = 2.0 * base.rgb * blend;
vec3 result2 = 1.0 - 2.0 * ( 1.0 - blend ) * ( 1.0 - base.rgb );
vec3 newColor = mix( result1, result2, L );
float A2 = opacity * base.a;
vec3 mixRGB = A2 * newColor.rgb;
mixRGB += ( ( 1.0 - A2 ) * base.rgb );
gl_FragColor = vec4( mixRGB, base.a );
}`
};
export { BleachBypassShader };

View File

@ -0,0 +1,49 @@
/**
* Blend two textures
*/
// Linear blend of two textures: opacity * mix( tDiffuse1, tDiffuse2, mixRatio ).
const BlendShader = {
name: 'BlendShader',
uniforms: {
'tDiffuse1': { value: null }, // first input texture
'tDiffuse2': { value: null }, // second input texture
'mixRatio': { value: 0.5 }, // 0 = only tDiffuse1, 1 = only tDiffuse2
'opacity': { value: 1.0 } // overall output scale (applied to RGBA)
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform float opacity;
uniform float mixRatio;
uniform sampler2D tDiffuse1;
uniform sampler2D tDiffuse2;
varying vec2 vUv;
void main() {
vec4 texel1 = texture2D( tDiffuse1, vUv );
vec4 texel2 = texture2D( tDiffuse2, vUv );
gl_FragColor = opacity * mix( texel1, texel2, mixRatio );
}`
};
export { BlendShader };

View File

@ -0,0 +1,172 @@
import {
AdditiveBlending,
HalfFloatType,
ShaderMaterial,
UniformsUtils,
Vector2,
WebGLRenderTarget
} from '/static/javascript/three/build/three.module.js';
import { Pass, FullScreenQuad } from'/static/javascript/three/examples/jsm/postprocessing/Pass.js';
import { ConvolutionShader } from'/static/javascript/three/examples/jsm/shaders/ConvolutionShader.js';
// Two-pass separable Gaussian bloom: blurs the read buffer horizontally into
// renderTargetX, then vertically into renderTargetY, and additively blends the
// blurred result back onto the read buffer (needsSwap = false).
class BloomPass extends Pass {
// strength: multiplier applied when compositing the blur back.
// kernelSize: convolution tap count (baked into shader defines).
// sigma: Gaussian standard deviation used to build the kernel weights.
constructor( strength = 1, kernelSize = 25, sigma = 4 ) {
super();
// render targets
// Created at 1x1 and resized via setSize(); HalfFloatType keeps precision in bright areas.
this.renderTargetX = new WebGLRenderTarget( 1, 1, { type: HalfFloatType } ); // will be resized later
this.renderTargetX.texture.name = 'BloomPass.x';
this.renderTargetY = new WebGLRenderTarget( 1, 1, { type: HalfFloatType } ); // will be resized later
this.renderTargetY.texture.name = 'BloomPass.y';
// combine material
this.combineUniforms = UniformsUtils.clone( CombineShader.uniforms );
this.combineUniforms[ 'strength' ].value = strength;
this.materialCombine = new ShaderMaterial( {
name: CombineShader.name,
uniforms: this.combineUniforms,
vertexShader: CombineShader.vertexShader,
fragmentShader: CombineShader.fragmentShader,
blending: AdditiveBlending, // adds the blur on top of the existing image
transparent: true
} );
// convolution material
const convolutionShader = ConvolutionShader;
this.convolutionUniforms = UniformsUtils.clone( convolutionShader.uniforms );
this.convolutionUniforms[ 'uImageIncrement' ].value = BloomPass.blurX;
this.convolutionUniforms[ 'cKernel' ].value = ConvolutionShader.buildKernel( sigma );
this.materialConvolution = new ShaderMaterial( {
name: convolutionShader.name,
uniforms: this.convolutionUniforms,
vertexShader: convolutionShader.vertexShader,
fragmentShader: convolutionShader.fragmentShader,
defines: {
'KERNEL_SIZE_FLOAT': kernelSize.toFixed( 1 ),
'KERNEL_SIZE_INT': kernelSize.toFixed( 0 )
}
} );
this.needsSwap = false;
this.fsQuad = new FullScreenQuad( null );
}
// NOTE(review): the result is composited onto readBuffer;
// writeBuffer and renderToScreen are not used by this pass.
render( renderer, writeBuffer, readBuffer, deltaTime, maskActive ) {
if ( maskActive ) renderer.state.buffers.stencil.setTest( false );
// Render quad with blured scene into texture (convolution pass 1)
this.fsQuad.material = this.materialConvolution;
this.convolutionUniforms[ 'tDiffuse' ].value = readBuffer.texture;
this.convolutionUniforms[ 'uImageIncrement' ].value = BloomPass.blurX;
renderer.setRenderTarget( this.renderTargetX );
renderer.clear();
this.fsQuad.render( renderer );
// Render quad with blured scene into texture (convolution pass 2)
this.convolutionUniforms[ 'tDiffuse' ].value = this.renderTargetX.texture;
this.convolutionUniforms[ 'uImageIncrement' ].value = BloomPass.blurY;
renderer.setRenderTarget( this.renderTargetY );
renderer.clear();
this.fsQuad.render( renderer );
// Render original scene with superimposed blur to texture
this.fsQuad.material = this.materialCombine;
this.combineUniforms[ 'tDiffuse' ].value = this.renderTargetY.texture;
if ( maskActive ) renderer.state.buffers.stencil.setTest( true );
renderer.setRenderTarget( readBuffer );
if ( this.clear ) renderer.clear();
this.fsQuad.render( renderer );
}
// Resizes both intermediate blur targets.
setSize( width, height ) {
this.renderTargetX.setSize( width, height );
this.renderTargetY.setSize( width, height );
}
// Releases render targets, materials and the fullscreen quad.
dispose() {
this.renderTargetX.dispose();
this.renderTargetY.dispose();
this.materialCombine.dispose();
this.materialConvolution.dispose();
this.fsQuad.dispose();
}
}
// Trivial compositing shader used by BloomPass: scales the input texture by
// `strength` (additive blending is configured on the material, not here).
const CombineShader = {
name: 'CombineShader',
uniforms: {
'tDiffuse': { value: null }, // blurred texture to composite
'strength': { value: 1.0 } // bloom intensity multiplier
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform float strength;
uniform sampler2D tDiffuse;
varying vec2 vUv;
void main() {
vec4 texel = texture2D( tDiffuse, vUv );
gl_FragColor = strength * texel;
}`
};
// Per-tap UV step for the two separable blur passes (1/512 of the texture).
BloomPass.blurX = new Vector2( 0.001953125, 0.0 );
BloomPass.blurY = new Vector2( 0.0, 0.001953125 );
export { BloomPass };

View File

@ -0,0 +1,397 @@
import {
Vector2
} from '/static/javascript/three/build/three.module.js';
/**
* Depth-of-field shader with bokeh
* ported from GLSL shader by Martins Upitis
* http://blenderartists.org/forum/showthread.php?237488-GLSL-depth-of-field-with-bokeh-v2-4-(update)
*
* Requires #define RINGS and SAMPLES integers
*/
// Depth-of-field shader with bokeh highlights (Martins Upitis port).
// Samples the color buffer in concentric rings around each fragment, with the
// blur radius derived either from a thin-lens model (focalLength/fstop) or the
// manual near/far ranges. The material must define integer macros RINGS and
// SAMPLES before compilation.
const BokehShader = {
name: 'BokehShader',
uniforms: {
'textureWidth': { value: 1.0 }, // color/depth buffer size in pixels
'textureHeight': { value: 1.0 },
'focalDepth': { value: 1.0 }, // focus distance in meters (used when shaderFocus is off)
'focalLength': { value: 24.0 }, // lens focal length in mm
'fstop': { value: 0.9 },
'tColor': { value: null }, // scene color
'tDepth': { value: null }, // scene depth (non-linear; linearized in the shader)
'maxblur': { value: 1.0 },
'showFocus': { value: 0 },
'manualdof': { value: 0 },
'vignetting': { value: 0 },
'depthblur': { value: 0 },
'threshold': { value: 0.5 },
'gain': { value: 2.0 },
'bias': { value: 0.5 },
'fringe': { value: 0.7 },
'znear': { value: 0.1 }, // must match the camera's near/far planes
'zfar': { value: 100 },
'noise': { value: 1 },
'dithering': { value: 0.0001 },
'pentagon': { value: 0 },
'shaderFocus': { value: 1 }, // when on, focus distance is read from tDepth at focusCoords
'focusCoords': { value: new Vector2() }
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
#include <common>
varying vec2 vUv;
uniform sampler2D tColor;
uniform sampler2D tDepth;
uniform float textureWidth;
uniform float textureHeight;
uniform float focalDepth; //focal distance value in meters, but you may use autofocus option below
uniform float focalLength; //focal length in mm
uniform float fstop; //f-stop value
uniform bool showFocus; //show debug focus point and focal range (red = focal point, green = focal range)
/*
make sure that these two values are the same for your camera, otherwise distances will be wrong.
*/
uniform float znear; // camera clipping start
uniform float zfar; // camera clipping end
//------------------------------------------
//user variables
const int samples = SAMPLES; //samples on the first ring
const int rings = RINGS; //ring count
const int maxringsamples = rings * samples;
uniform bool manualdof; // manual dof calculation
float ndofstart = 1.0; // near dof blur start
float ndofdist = 2.0; // near dof blur falloff distance
float fdofstart = 1.0; // far dof blur start
float fdofdist = 3.0; // far dof blur falloff distance
float CoC = 0.03; //circle of confusion size in mm (35mm film = 0.03mm)
uniform bool vignetting; // use optical lens vignetting
float vignout = 1.3; // vignetting outer border
float vignin = 0.0; // vignetting inner border
float vignfade = 22.0; // f-stops till vignete fades
uniform bool shaderFocus;
// disable if you use external focalDepth value
uniform vec2 focusCoords;
// autofocus point on screen (0.0,0.0 - left lower corner, 1.0,1.0 - upper right)
// if center of screen use vec2(0.5, 0.5);
uniform float maxblur;
//clamp value of max blur (0.0 = no blur, 1.0 default)
uniform float threshold; // highlight threshold;
uniform float gain; // highlight gain;
uniform float bias; // bokeh edge bias
uniform float fringe; // bokeh chromatic aberration / fringing
uniform bool noise; //use noise instead of pattern for sample dithering
uniform float dithering;
uniform bool depthblur; // blur the depth buffer
float dbsize = 1.25; // depth blur size
/*
next part is experimental
not looking good with small sample and ring count
looks okay starting from samples = 4, rings = 4
*/
uniform bool pentagon; //use pentagon as bokeh shape?
float feather = 0.4; //pentagon shape feather
//------------------------------------------
float penta(vec2 coords) {
//pentagonal shape
float scale = float(rings) - 1.3;
vec4 HS0 = vec4( 1.0, 0.0, 0.0, 1.0);
vec4 HS1 = vec4( 0.309016994, 0.951056516, 0.0, 1.0);
vec4 HS2 = vec4(-0.809016994, 0.587785252, 0.0, 1.0);
vec4 HS3 = vec4(-0.809016994,-0.587785252, 0.0, 1.0);
vec4 HS4 = vec4( 0.309016994,-0.951056516, 0.0, 1.0);
vec4 HS5 = vec4( 0.0 ,0.0 , 1.0, 1.0);
vec4 one = vec4( 1.0 );
vec4 P = vec4((coords),vec2(scale, scale));
vec4 dist = vec4(0.0);
float inorout = -4.0;
dist.x = dot( P, HS0 );
dist.y = dot( P, HS1 );
dist.z = dot( P, HS2 );
dist.w = dot( P, HS3 );
dist = smoothstep( -feather, feather, dist );
inorout += dot( dist, one );
dist.x = dot( P, HS4 );
dist.y = HS5.w - abs( P.z );
dist = smoothstep( -feather, feather, dist );
inorout += dist.x;
return clamp( inorout, 0.0, 1.0 );
}
float bdepth(vec2 coords) {
// Depth buffer blur
float d = 0.0;
float kernel[9];
vec2 offset[9];
vec2 wh = vec2(1.0/textureWidth,1.0/textureHeight) * dbsize;
offset[0] = vec2(-wh.x,-wh.y);
offset[1] = vec2( 0.0, -wh.y);
offset[2] = vec2( wh.x -wh.y);
offset[3] = vec2(-wh.x, 0.0);
offset[4] = vec2( 0.0, 0.0);
offset[5] = vec2( wh.x, 0.0);
offset[6] = vec2(-wh.x, wh.y);
offset[7] = vec2( 0.0, wh.y);
offset[8] = vec2( wh.x, wh.y);
kernel[0] = 1.0/16.0; kernel[1] = 2.0/16.0; kernel[2] = 1.0/16.0;
kernel[3] = 2.0/16.0; kernel[4] = 4.0/16.0; kernel[5] = 2.0/16.0;
kernel[6] = 1.0/16.0; kernel[7] = 2.0/16.0; kernel[8] = 1.0/16.0;
for( int i=0; i<9; i++ ) {
float tmp = texture2D(tDepth, coords + offset[i]).r;
d += tmp * kernel[i];
}
return d;
}
vec3 color(vec2 coords,float blur) {
//processing the sample
vec3 col = vec3(0.0);
vec2 texel = vec2(1.0/textureWidth,1.0/textureHeight);
col.r = texture2D(tColor,coords + vec2(0.0,1.0)*texel*fringe*blur).r;
col.g = texture2D(tColor,coords + vec2(-0.866,-0.5)*texel*fringe*blur).g;
col.b = texture2D(tColor,coords + vec2(0.866,-0.5)*texel*fringe*blur).b;
vec3 lumcoeff = vec3(0.299,0.587,0.114);
float lum = dot(col.rgb, lumcoeff);
float thresh = max((lum-threshold)*gain, 0.0);
return col+mix(vec3(0.0),col,thresh*blur);
}
vec3 debugFocus(vec3 col, float blur, float depth) {
float edge = 0.002*depth; //distance based edge smoothing
float m = clamp(smoothstep(0.0,edge,blur),0.0,1.0);
float e = clamp(smoothstep(1.0-edge,1.0,blur),0.0,1.0);
col = mix(col,vec3(1.0,0.5,0.0),(1.0-m)*0.6);
col = mix(col,vec3(0.0,0.5,1.0),((1.0-e)-(1.0-m))*0.2);
return col;
}
float linearize(float depth) {
return -zfar * znear / (depth * (zfar - znear) - zfar);
}
float vignette() {
float dist = distance(vUv.xy, vec2(0.5,0.5));
dist = smoothstep(vignout+(fstop/vignfade), vignin+(fstop/vignfade), dist);
return clamp(dist,0.0,1.0);
}
float gather(float i, float j, int ringsamples, inout vec3 col, float w, float h, float blur) {
float rings2 = float(rings);
float step = PI*2.0 / float(ringsamples);
float pw = cos(j*step)*i;
float ph = sin(j*step)*i;
float p = 1.0;
if (pentagon) {
p = penta(vec2(pw,ph));
}
col += color(vUv.xy + vec2(pw*w,ph*h), blur) * mix(1.0, i/rings2, bias) * p;
return 1.0 * mix(1.0, i /rings2, bias) * p;
}
void main() {
//scene depth calculation
float depth = linearize(texture2D(tDepth,vUv.xy).x);
// Blur depth?
if ( depthblur ) {
depth = linearize(bdepth(vUv.xy));
}
//focal plane calculation
float fDepth = focalDepth;
if (shaderFocus) {
fDepth = linearize(texture2D(tDepth,focusCoords).x);
}
// dof blur factor calculation
float blur = 0.0;
if (manualdof) {
float a = depth-fDepth; // Focal plane
float b = (a-fdofstart)/fdofdist; // Far DoF
float c = (-a-ndofstart)/ndofdist; // Near Dof
blur = (a>0.0) ? b : c;
} else {
float f = focalLength; // focal length in mm
float d = fDepth*1000.0; // focal plane in mm
float o = depth*1000.0; // depth in mm
float a = (o*f)/(o-f);
float b = (d*f)/(d-f);
float c = (d-f)/(d*fstop*CoC);
blur = abs(a-b)*c;
}
blur = clamp(blur,0.0,1.0);
// calculation of pattern for dithering
vec2 noise = vec2(rand(vUv.xy), rand( vUv.xy + vec2( 0.4, 0.6 ) ) )*dithering*blur;
// getting blur x and y step factor
float w = (1.0/textureWidth)*blur*maxblur+noise.x;
float h = (1.0/textureHeight)*blur*maxblur+noise.y;
// calculation of final color
vec3 col = vec3(0.0);
if(blur < 0.05) {
//some optimization thingy
col = texture2D(tColor, vUv.xy).rgb;
} else {
col = texture2D(tColor, vUv.xy).rgb;
float s = 1.0;
int ringsamples;
for (int i = 1; i <= rings; i++) {
/*unboxstart*/
ringsamples = i * samples;
for (int j = 0 ; j < maxringsamples ; j++) {
if (j >= ringsamples) break;
s += gather(float(i), float(j), ringsamples, col, w, h, blur);
}
/*unboxend*/
}
col /= s; //divide by sample count
}
if (showFocus) {
col = debugFocus(col, blur, depth);
}
if (vignetting) {
col *= vignette();
}
gl_FragColor.rgb = col;
gl_FragColor.a = 1.0;
#include <tonemapping_fragment>
#include <colorspace_fragment>
}`
};
// Companion depth material for the bokeh effect: writes eye-space depth
// remapped to [0, 1] between mNear and mFar (1 at/before mNear, 0 at/after mFar).
const BokehDepthShader = {
name: 'BokehDepthShader',
uniforms: {
'mNear': { value: 1.0 }, // start of the depth ramp (world units)
'mFar': { value: 1000.0 }, // end of the depth ramp (world units)
},
vertexShader: /* glsl */`
varying float vViewZDepth;
void main() {
#include <begin_vertex>
#include <project_vertex>
vViewZDepth = - mvPosition.z;
}`,
fragmentShader: /* glsl */`
uniform float mNear;
uniform float mFar;
varying float vViewZDepth;
void main() {
float color = 1.0 - smoothstep( mNear, mFar, vViewZDepth );
gl_FragColor = vec4( vec3( color ), 1.0 );
}`
};
export { BokehShader, BokehDepthShader };

View File

@ -0,0 +1,56 @@
/**
* Brightness and contrast adjustment
* https://github.com/evanw/glfx.js
* brightness: -1 to 1 (-1 is solid black, 0 is no change, and 1 is solid white)
* contrast: -1 to 1 (-1 is solid gray, 0 is no change, and 1 is maximum contrast)
*/
// Brightness/contrast adjustment (glfx.js port). Brightness is an additive
// offset; contrast scales the color away from / toward mid-gray (0.5).
const BrightnessContrastShader = {
name: 'BrightnessContrastShader',
uniforms: {
'tDiffuse': { value: null }, // input frame
'brightness': { value: 0 }, // -1 (black) .. 0 (unchanged) .. 1 (white)
'contrast': { value: 0 } // -1 (gray) .. 0 (unchanged) .. 1 (max contrast)
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform sampler2D tDiffuse;
uniform float brightness;
uniform float contrast;
varying vec2 vUv;
void main() {
gl_FragColor = texture2D( tDiffuse, vUv );
gl_FragColor.rgb += brightness;
if (contrast > 0.0) {
gl_FragColor.rgb = (gl_FragColor.rgb - 0.5) / (1.0 - contrast) + 0.5;
} else {
gl_FragColor.rgb = (gl_FragColor.rgb - 0.5) * (1.0 + contrast) + 0.5;
}
}`
};
export { BrightnessContrastShader };

View File

@ -0,0 +1,73 @@
import {
MathUtils,
Quaternion,
Vector3
} from '/static/javascript/three/build/three.module.js';
// Scratch objects reused across calls to avoid per-call allocation.
const _va = /*@__PURE__*/ new Vector3(); // eye -> bottom-left corner
const _vb = /*@__PURE__*/ new Vector3(); // eye -> bottom-right corner
const _vc = /*@__PURE__*/ new Vector3(); // eye -> top-left corner
const _vr = /*@__PURE__*/ new Vector3(); // right axis of screen
const _vu = /*@__PURE__*/ new Vector3(); // up axis of screen
const _vn = /*@__PURE__*/ new Vector3(); // normal vector of screen
const _vec = /*@__PURE__*/ new Vector3(); // general-purpose temporary
const _quat = /*@__PURE__*/ new Quaternion(); // temporary quaternion

/** Set a PerspectiveCamera's projectionMatrix and quaternion
 * to exactly frame the corners of an arbitrary rectangle
 * (off-axis projection, e.g. portal or CAVE rendering).
 * NOTE: This function ignores the standard parameters;
 * do not call updateProjectionMatrix() after this!
 * @param {PerspectiveCamera} camera - camera to configure; position and near/far are read
 * @param {Vector3} bottomLeftCorner
 * @param {Vector3} bottomRightCorner
 * @param {Vector3} topLeftCorner
 * @param {boolean} estimateViewFrustum - also set a conservative fov for frustum culling */
function frameCorners( camera, bottomLeftCorner, bottomRightCorner, topLeftCorner, estimateViewFrustum = false ) {

	const eye = camera.position;
	const near = camera.near;
	const far = camera.far;

	// Orthonormal basis of the screen rectangle.
	_vr.subVectors( bottomRightCorner, bottomLeftCorner ).normalize();
	_vu.subVectors( topLeftCorner, bottomLeftCorner ).normalize();
	_vn.crossVectors( _vr, _vu ).normalize();

	// Vectors from the eye to each corner.
	_va.subVectors( bottomLeftCorner, eye );
	_vb.subVectors( bottomRightCorner, eye );
	_vc.subVectors( topLeftCorner, eye );

	// Frustum extents on the near plane.
	const dist = - _va.dot( _vn ); // distance from eye to screen
	const left = _vr.dot( _va ) * near / dist;
	const right = _vr.dot( _vb ) * near / dist;
	const bottom = _vu.dot( _va ) * near / dist;
	const top = _vu.dot( _vc ) * near / dist;

	// Set the camera rotation to match the focal plane to the corners' plane.
	_quat.setFromUnitVectors( _vec.set( 0, 1, 0 ), _vu );
	camera.quaternion.setFromUnitVectors( _vec.set( 0, 0, 1 ).applyQuaternion( _quat ), _vn ).multiply( _quat );

	// Asymmetric (off-axis) perspective projection matching the corners.
	camera.projectionMatrix.set(
		2.0 * near / ( right - left ), 0.0, ( right + left ) / ( right - left ), 0.0,
		0.0, 2.0 * near / ( top - bottom ), ( top + bottom ) / ( top - bottom ), 0.0,
		0.0, 0.0, ( far + near ) / ( near - far ), 2.0 * far * near / ( near - far ),
		0.0, 0.0, - 1.0, 0.0
	);
	camera.projectionMatrixInverse.copy( camera.projectionMatrix ).invert();

	if ( estimateViewFrustum ) {

		// Conservative fov estimate so the built-in frustum culling
		// keeps everything inside the framed rectangle visible.
		camera.fov =
			MathUtils.RAD2DEG / Math.min( 1.0, camera.aspect ) *
			Math.atan( ( _vec.subVectors( bottomRightCorner, bottomLeftCorner ).length() +
			( _vec.subVectors( topLeftCorner, bottomLeftCorner ).length() ) ) / _va.length() );

	}

}
export { frameCorners };

View File

@ -0,0 +1,46 @@
import {
Color
} from '/static/javascript/three/build/three.module.js';
import { Pass } from'/static/javascript/three/examples/jsm/postprocessing/Pass.js';
/**
 * Pass that clears the current render target, optionally overriding the
 * renderer's clear color/alpha for the duration of the clear.
 */
class ClearPass extends Pass {

	/**
	 * @param {number|Color|string} [clearColor=0x000000] - color to clear with; a falsy value skips the color override
	 * @param {number} [clearAlpha=0] - alpha used together with clearColor
	 */
	constructor( clearColor = 0x000000, clearAlpha = 0 ) {

		super();

		this.needsSwap = false;

		this.clearColor = clearColor;
		this.clearAlpha = clearAlpha;
		this._oldClearColor = new Color();

	}

	render( renderer, writeBuffer, readBuffer /*, deltaTime, maskActive */ ) {

		const overrideColor = this.clearColor;
		let previousAlpha;

		// Temporarily swap in this pass's clear color/alpha.
		if ( overrideColor ) {

			renderer.getClearColor( this._oldClearColor );
			previousAlpha = renderer.getClearAlpha();

			renderer.setClearColor( overrideColor, this.clearAlpha );

		}

		renderer.setRenderTarget( this.renderToScreen ? null : readBuffer );
		renderer.clear();

		// Restore the renderer's previous clear state.
		if ( overrideColor ) {

			renderer.setClearColor( this._oldClearColor, previousAlpha );

		}

	}

}
export { ClearPass };

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,52 @@
import {
Vector3
} from '/static/javascript/three/build/three.module.js';
/**
* Color correction
*/
// Per-channel color correction: output = mulRGB * pow( input + addRGB, powRGB ).
const ColorCorrectionShader = {
name: 'ColorCorrectionShader',
uniforms: {
'tDiffuse': { value: null }, // input frame
'powRGB': { value: new Vector3( 2, 2, 2 ) }, // per-channel gamma exponent
'mulRGB': { value: new Vector3( 1, 1, 1 ) }, // per-channel gain
'addRGB': { value: new Vector3( 0, 0, 0 ) } // per-channel offset applied before pow()
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform sampler2D tDiffuse;
uniform vec3 powRGB;
uniform vec3 mulRGB;
uniform vec3 addRGB;
varying vec2 vUv;
void main() {
gl_FragColor = texture2D( tDiffuse, vUv );
gl_FragColor.rgb = mulRGB * pow( ( gl_FragColor.rgb + addRGB ), powRGB );
}`
};
export { ColorCorrectionShader };

View File

@ -0,0 +1,50 @@
import {
Color
} from '/static/javascript/three/build/three.module.js';
/**
* Colorify shader
*/
// Tints the image: converts each texel to luminance and multiplies by `color`,
// preserving the original alpha. luminance() comes from three.js' built-in
// GLSL chunks injected by ShaderMaterial.
const ColorifyShader = {
name: 'ColorifyShader',
uniforms: {
'tDiffuse': { value: null }, // input frame
'color': { value: new Color( 0xffffff ) } // tint color (white = grayscale output)
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform vec3 color;
uniform sampler2D tDiffuse;
varying vec2 vUv;
void main() {
vec4 texel = texture2D( tDiffuse, vUv );
float v = luminance( texel.xyz );
gl_FragColor = vec4( v * color, texel.w );
}`
};
export { ColorifyShader };

View File

@ -0,0 +1,103 @@
import {
Vector2
} from '/static/javascript/three/build/three.module.js';
/**
* Convolution shader
* ported from o3d sample to WebGL / GLSL
*/
// 1D convolution along `uImageIncrement` with weights in cKernel.
// KERNEL_SIZE_* defines are overridden by users (e.g. BloomPass) to match the
// kernel built with buildKernel().
const ConvolutionShader = {
name: 'ConvolutionShader',
defines: {
'KERNEL_SIZE_FLOAT': '25.0',
'KERNEL_SIZE_INT': '25'
},
uniforms: {
'tDiffuse': { value: null }, // input texture
'uImageIncrement': { value: new Vector2( 0.001953125, 0.0 ) }, // UV step between taps (direction sets blur axis)
'cKernel': { value: [] } // normalized weights, length KERNEL_SIZE_INT
},
vertexShader: /* glsl */`
uniform vec2 uImageIncrement;
varying vec2 vUv;
void main() {
vUv = uv - ( ( KERNEL_SIZE_FLOAT - 1.0 ) / 2.0 ) * uImageIncrement;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform float cKernel[ KERNEL_SIZE_INT ];
uniform sampler2D tDiffuse;
uniform vec2 uImageIncrement;
varying vec2 vUv;
void main() {
vec2 imageCoord = vUv;
vec4 sum = vec4( 0.0, 0.0, 0.0, 0.0 );
for( int i = 0; i < KERNEL_SIZE_INT; i ++ ) {
sum += texture2D( tDiffuse, imageCoord ) * cKernel[ i ];
imageCoord += uImageIncrement;
}
gl_FragColor = sum;
}`,
// Builds a normalized Gaussian kernel for the given sigma.
// Size is 2*ceil(3*sigma)+1, capped at 25 taps to match the default defines.
buildKernel: function ( sigma ) {
// We lop off the sqrt(2 * pi) * sigma term, since we're going to normalize anyway.
const kMaxKernelSize = 25;
let kernelSize = 2 * Math.ceil( sigma * 3.0 ) + 1;
if ( kernelSize > kMaxKernelSize ) kernelSize = kMaxKernelSize;
const halfWidth = ( kernelSize - 1 ) * 0.5;
const values = new Array( kernelSize );
let sum = 0.0;
for ( let i = 0; i < kernelSize; ++ i ) {
values[ i ] = gauss( i - halfWidth, sigma );
sum += values[ i ];
}
// normalize the kernel
for ( let i = 0; i < kernelSize; ++ i ) values[ i ] /= sum;
return values;
}
};
/**
 * Unnormalized 1D Gaussian: exp( -x^2 / (2 * sigma^2) ).
 * The 1 / (sqrt(2 * pi) * sigma) factor is omitted because
 * buildKernel() normalizes the weights afterwards.
 * @param {number} x - distance from the kernel center
 * @param {number} sigma - standard deviation
 * @returns {number} weight in (0, 1]
 */
function gauss( x, sigma ) {

	const twoVariance = 2.0 * sigma * sigma;
	return Math.exp( - ( x * x ) / twoVariance );

}
export { ConvolutionShader };

View File

@ -0,0 +1,85 @@
import {
BackSide,
BoxGeometry,
Mesh,
PerspectiveCamera,
Scene,
ShaderLib,
ShaderMaterial,
UniformsUtils
} from '/static/javascript/three/build/three.module.js';
import { Pass } from'/static/javascript/three/examples/jsm/postprocessing/Pass.js';
// Renders a cube texture as a skybox behind the existing buffer contents,
// using the supplied camera's orientation (position is ignored by a skybox).
class CubeTexturePass extends Pass {
// camera: whose projection/orientation to mirror; tCube: cube texture to draw;
// opacity: blend factor (< 1 makes the material transparent).
constructor( camera, tCube, opacity = 1 ) {
super();
this.camera = camera;
this.needsSwap = false;
this.cubeShader = ShaderLib[ 'cube' ];
this.cubeMesh = new Mesh(
new BoxGeometry( 10, 10, 10 ),
new ShaderMaterial( {
uniforms: UniformsUtils.clone( this.cubeShader.uniforms ),
vertexShader: this.cubeShader.vertexShader,
fragmentShader: this.cubeShader.fragmentShader,
depthTest: false,
depthWrite: false,
side: BackSide
} )
);
// Expose the cube texture uniform as `envMap` so renderer internals that
// read material.envMap see the active texture.
Object.defineProperty( this.cubeMesh.material, 'envMap', {
get: function () {
return this.uniforms.tCube.value;
}
} );
this.tCube = tCube;
this.opacity = opacity;
this.cubeScene = new Scene();
this.cubeCamera = new PerspectiveCamera();
this.cubeScene.add( this.cubeMesh );
}
// Draws the cube on top of readBuffer (or the screen) without clearing
// unless this.clear is set; autoClear is suspended for the draw.
render( renderer, writeBuffer, readBuffer/*, deltaTime, maskActive*/ ) {
const oldAutoClear = renderer.autoClear;
renderer.autoClear = false;
// Copy only projection + orientation from the tracked camera.
this.cubeCamera.projectionMatrix.copy( this.camera.projectionMatrix );
this.cubeCamera.quaternion.setFromRotationMatrix( this.camera.matrixWorld );
this.cubeMesh.material.uniforms.tCube.value = this.tCube;
this.cubeMesh.material.uniforms.tFlip.value = ( this.tCube.isCubeTexture && this.tCube.isRenderTargetTexture === false ) ? - 1 : 1;
this.cubeMesh.material.uniforms.opacity.value = this.opacity;
this.cubeMesh.material.transparent = ( this.opacity < 1.0 );
renderer.setRenderTarget( this.renderToScreen ? null : readBuffer );
if ( this.clear ) renderer.clear();
renderer.render( this.cubeScene, this.cubeCamera );
renderer.autoClear = oldAutoClear;
}
// Releases the skybox geometry and material.
dispose() {
this.cubeMesh.geometry.dispose();
this.cubeMesh.material.dispose();
}
}
export { CubeTexturePass };

View File

@ -0,0 +1,318 @@
import {
CompressedTextureLoader,
RGBAFormat,
RGBA_S3TC_DXT3_Format,
RGBA_S3TC_DXT5_Format,
RGB_ETC1_Format,
RGB_S3TC_DXT1_Format,
RGB_BPTC_SIGNED_Format,
RGB_BPTC_UNSIGNED_Format
} from '/static/javascript/three/build/three.module.js';
/**
 * Loader for the DDS (DirectDraw Surface) compressed texture format.
 * Handles DXT1/DXT3/DXT5 and ETC1 compressed data, BC6H via the DX10
 * extended header, 32-bit uncompressed RGBA, cubemaps, and mipmap chains.
 */
class DDSLoader extends CompressedTextureLoader {

	constructor( manager ) {

		super( manager );

	}

	/**
	 * Parses raw DDS file data.
	 *
	 * @param {ArrayBuffer} buffer - The DDS file contents.
	 * @param {boolean} loadMipmaps - When not false, extracts the full mipmap chain.
	 * @returns {Object} Result with { mipmaps, width, height, format, mipmapCount }
	 *   plus isCubemap; on any header error, logs and returns the partial result.
	 */
	parse( buffer, loadMipmaps ) {

		const dds = { mipmaps: [], width: 0, height: 0, format: null, mipmapCount: 1 };

		// Adapted from @toji's DDS utils
		// https://github.com/toji/webgl-texture-utils/blob/master/texture-util/dds.js

		// All values and structures referenced from:
		// http://msdn.microsoft.com/en-us/library/bb943991.aspx/

		const DDS_MAGIC = 0x20534444;

		// const DDSD_CAPS = 0x1;
		// const DDSD_HEIGHT = 0x2;
		// const DDSD_WIDTH = 0x4;
		// const DDSD_PITCH = 0x8;
		// const DDSD_PIXELFORMAT = 0x1000;
		const DDSD_MIPMAPCOUNT = 0x20000;
		// const DDSD_LINEARSIZE = 0x80000;
		// const DDSD_DEPTH = 0x800000;

		// const DDSCAPS_COMPLEX = 0x8;
		// const DDSCAPS_MIPMAP = 0x400000;
		// const DDSCAPS_TEXTURE = 0x1000;

		const DDSCAPS2_CUBEMAP = 0x200;
		const DDSCAPS2_CUBEMAP_POSITIVEX = 0x400;
		const DDSCAPS2_CUBEMAP_NEGATIVEX = 0x800;
		const DDSCAPS2_CUBEMAP_POSITIVEY = 0x1000;
		const DDSCAPS2_CUBEMAP_NEGATIVEY = 0x2000;
		const DDSCAPS2_CUBEMAP_POSITIVEZ = 0x4000;
		const DDSCAPS2_CUBEMAP_NEGATIVEZ = 0x8000;
		// const DDSCAPS2_VOLUME = 0x200000;

		// const DDPF_ALPHAPIXELS = 0x1;
		// const DDPF_ALPHA = 0x2;
		// const DDPF_FOURCC = 0x4;
		// const DDPF_RGB = 0x40;
		// const DDPF_YUV = 0x200;
		// const DDPF_LUMINANCE = 0x20000;

		// DXGI format codes used by the DX10 extended header (BC6H variants).
		const DXGI_FORMAT_BC6H_UF16 = 95;
		const DXGI_FORMAT_BC6H_SF16 = 96;

		// Packs a 4-character code string into its little-endian int32 form.
		function fourCCToInt32( value ) {

			return value.charCodeAt( 0 ) +
				( value.charCodeAt( 1 ) << 8 ) +
				( value.charCodeAt( 2 ) << 16 ) +
				( value.charCodeAt( 3 ) << 24 );

		}

		// Inverse of fourCCToInt32 — used only for error messages.
		function int32ToFourCC( value ) {

			return String.fromCharCode(
				value & 0xff,
				( value >> 8 ) & 0xff,
				( value >> 16 ) & 0xff,
				( value >> 24 ) & 0xff
			);

		}

		// Reads one uncompressed 32-bit mip level, swizzling the byte order
		// (read as b,g,r,a per the variable naming — i.e. BGRA in the file)
		// into RGBA output.
		function loadARGBMip( buffer, dataOffset, width, height ) {

			const dataLength = width * height * 4;
			const srcBuffer = new Uint8Array( buffer, dataOffset, dataLength );
			const byteArray = new Uint8Array( dataLength );
			let dst = 0;
			let src = 0;
			for ( let y = 0; y < height; y ++ ) {

				for ( let x = 0; x < width; x ++ ) {

					const b = srcBuffer[ src ]; src ++;
					const g = srcBuffer[ src ]; src ++;
					const r = srcBuffer[ src ]; src ++;
					const a = srcBuffer[ src ]; src ++;
					byteArray[ dst ] = r; dst ++;	//r
					byteArray[ dst ] = g; dst ++;	//g
					byteArray[ dst ] = b; dst ++;	//b
					byteArray[ dst ] = a; dst ++;	//a

				}

			}

			return byteArray;

		}

		const FOURCC_DXT1 = fourCCToInt32( 'DXT1' );
		const FOURCC_DXT3 = fourCCToInt32( 'DXT3' );
		const FOURCC_DXT5 = fourCCToInt32( 'DXT5' );
		const FOURCC_ETC1 = fourCCToInt32( 'ETC1' );
		const FOURCC_DX10 = fourCCToInt32( 'DX10' );

		const headerLengthInt = 31; // The header length in 32 bit ints
		const extendedHeaderLengthInt = 5; // The extended header length in 32 bit ints

		// Offsets into the header array
		const off_magic = 0;

		const off_size = 1;
		const off_flags = 2;
		const off_height = 3;
		const off_width = 4;

		const off_mipmapCount = 7;

		// const off_pfFlags = 20;
		const off_pfFourCC = 21;
		const off_RGBBitCount = 22;
		const off_RBitMask = 23;
		const off_GBitMask = 24;
		const off_BBitMask = 25;
		const off_ABitMask = 26;

		// const off_caps = 27;
		const off_caps2 = 28;
		// const off_caps3 = 29;
		// const off_caps4 = 30;

		// If fourCC = DX10, the extended header starts after 32
		const off_dxgiFormat = 0;

		// Parse header

		const header = new Int32Array( buffer, 0, headerLengthInt );

		if ( header[ off_magic ] !== DDS_MAGIC ) {

			console.error( 'THREE.DDSLoader.parse: Invalid magic number in DDS header.' );
			return dds;

		}

		let blockBytes;

		const fourCC = header[ off_pfFourCC ];

		let isRGBAUncompressed = false;

		// Pixel data starts right after the header (dwSize + the magic word).
		let dataOffset = header[ off_size ] + 4;

		// Map the FourCC code to a three.js compressed format constant and
		// the byte size of one 4x4 compression block.
		switch ( fourCC ) {

			case FOURCC_DXT1:

				blockBytes = 8;
				dds.format = RGB_S3TC_DXT1_Format;
				break;

			case FOURCC_DXT3:

				blockBytes = 16;
				dds.format = RGBA_S3TC_DXT3_Format;
				break;

			case FOURCC_DXT5:

				blockBytes = 16;
				dds.format = RGBA_S3TC_DXT5_Format;
				break;

			case FOURCC_ETC1:

				blockBytes = 8;
				dds.format = RGB_ETC1_Format;
				break;

			case FOURCC_DX10:

				// DX10 files carry an extra header describing the DXGI format.
				dataOffset += extendedHeaderLengthInt * 4;
				const extendedHeader = new Int32Array( buffer, ( headerLengthInt + 1 ) * 4, extendedHeaderLengthInt );
				const dxgiFormat = extendedHeader[ off_dxgiFormat ];
				switch ( dxgiFormat ) {

					case DXGI_FORMAT_BC6H_SF16: {

						blockBytes = 16;
						dds.format = RGB_BPTC_SIGNED_Format;
						break;

					}

					case DXGI_FORMAT_BC6H_UF16: {

						blockBytes = 16;
						dds.format = RGB_BPTC_UNSIGNED_Format;
						break;

					}

					default: {

						console.error( 'THREE.DDSLoader.parse: Unsupported DXGI_FORMAT code ', dxgiFormat );
						return dds;

					}

				}

				break;

			default:

				// No known FourCC: accept only 32-bit uncompressed data with the
				// standard BGRA bit masks.
				if ( header[ off_RGBBitCount ] === 32
					&& header[ off_RBitMask ] & 0xff0000
					&& header[ off_GBitMask ] & 0xff00
					&& header[ off_BBitMask ] & 0xff
					&& header[ off_ABitMask ] & 0xff000000 ) {

					isRGBAUncompressed = true;
					blockBytes = 64;
					dds.format = RGBAFormat;

				} else {

					console.error( 'THREE.DDSLoader.parse: Unsupported FourCC code ', int32ToFourCC( fourCC ) );
					return dds;

				}

		}

		dds.mipmapCount = 1;

		if ( header[ off_flags ] & DDSD_MIPMAPCOUNT && loadMipmaps !== false ) {

			dds.mipmapCount = Math.max( 1, header[ off_mipmapCount ] );

		}

		const caps2 = header[ off_caps2 ];
		dds.isCubemap = caps2 & DDSCAPS2_CUBEMAP ? true : false;

		// A cubemap must provide all six faces; partial cubemaps are rejected.
		if ( dds.isCubemap && (
			! ( caps2 & DDSCAPS2_CUBEMAP_POSITIVEX ) ||
			! ( caps2 & DDSCAPS2_CUBEMAP_NEGATIVEX ) ||
			! ( caps2 & DDSCAPS2_CUBEMAP_POSITIVEY ) ||
			! ( caps2 & DDSCAPS2_CUBEMAP_NEGATIVEY ) ||
			! ( caps2 & DDSCAPS2_CUBEMAP_POSITIVEZ ) ||
			! ( caps2 & DDSCAPS2_CUBEMAP_NEGATIVEZ )
		) ) {

			console.error( 'THREE.DDSLoader.parse: Incomplete cubemap faces' );
			return dds;

		}

		dds.width = header[ off_width ];
		dds.height = header[ off_height ];

		// Extract mipmaps buffers

		const faces = dds.isCubemap ? 6 : 1;

		for ( let face = 0; face < faces; face ++ ) {

			let width = dds.width;
			let height = dds.height;

			for ( let i = 0; i < dds.mipmapCount; i ++ ) {

				let byteArray, dataLength;

				if ( isRGBAUncompressed ) {

					byteArray = loadARGBMip( buffer, dataOffset, width, height );
					dataLength = byteArray.length;

				} else {

					// Compressed size: number of 4x4 blocks (min one block per axis)
					// times bytes per block.
					dataLength = Math.max( 4, width ) / 4 * Math.max( 4, height ) / 4 * blockBytes;
					byteArray = new Uint8Array( buffer, dataOffset, dataLength );

				}

				const mipmap = { 'data': byteArray, 'width': width, 'height': height };
				dds.mipmaps.push( mipmap );

				dataOffset += dataLength;

				// Each successive mip level halves each dimension, floored at 1.
				width = Math.max( width >> 1, 1 );
				height = Math.max( height >> 1, 1 );

			}

		}

		return dds;

	}

}
export { DDSLoader };

View File

@ -0,0 +1,56 @@
/**
* Depth-of-field shader using mipmaps
* - from Matt Handley @applmak
* - requires power-of-2 sized render target with enabled mipmaps
*/
// Mipmap-based depth-of-field: the out-of-focus amount selects a texture
// LOD bias, so the color target must be power-of-two sized with mipmaps.
const DOFMipMapShader = {

	name: 'DOFMipMapShader',

	uniforms: {
		tColor: { value: null },  // sharp color render target (with mipmaps)
		tDepth: { value: null },  // depth texture
		focus: { value: 1.0 },    // depth value that renders fully sharp
		maxblur: { value: 1.0 }   // maximum LOD bias applied when out of focus
	},

	vertexShader: /* glsl */`
		varying vec2 vUv;
		void main() {
			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
		}`,

	fragmentShader: /* glsl */`
		uniform float focus;
		uniform float maxblur;
		uniform sampler2D tColor;
		uniform sampler2D tDepth;
		varying vec2 vUv;
		void main() {
			vec4 depth = texture2D( tDepth, vUv );
			float factor = depth.x - focus;
			vec4 col = texture2D( tColor, vUv, 2.0 * maxblur * abs( focus - depth.x ) );
			gl_FragColor = col;
			gl_FragColor.a = 1.0;
		}`

};
export { DOFMipMapShader };

View File

@ -0,0 +1,613 @@
import {
BufferAttribute,
BufferGeometry,
Color,
FileLoader,
Loader,
LinearSRGBColorSpace,
SRGBColorSpace
} from '/static/javascript/three/build/three.module.js';
// Caches decode promises per input ArrayBuffer: a buffer is transferred to a
// worker and cannot be transferred again, so repeat requests reuse the result.
const _taskCache = new WeakMap();

/**
 * Loader for Draco-compressed geometry (.drc). Decoding runs in a pool of
 * Web Workers backed by the Draco WASM (or JS fallback) decoder, which must
 * be made available via setDecoderPath().
 */
class DRACOLoader extends Loader {

	constructor( manager ) {

		super( manager );

		this.decoderPath = '';
		this.decoderConfig = {};
		this.decoderBinary = null;
		this.decoderPending = null;

		// Maximum number of decode workers; workers are created lazily.
		this.workerLimit = 4;
		this.workerPool = [];
		this.workerNextTaskID = 1;
		this.workerSourceURL = '';

		// Attribute-name -> Draco semantic used when decoding standalone .drc files.
		this.defaultAttributeIDs = {
			position: 'POSITION',
			normal: 'NORMAL',
			color: 'COLOR',
			uv: 'TEX_COORD'
		};
		this.defaultAttributeTypes = {
			position: 'Float32Array',
			normal: 'Float32Array',
			color: 'Float32Array',
			uv: 'Float32Array'
		};

	}

	// Sets the base path from which the Draco decoder library is fetched.
	setDecoderPath( path ) {

		this.decoderPath = path;

		return this;

	}

	// Overrides decoder configuration (e.g. { type: 'js' } to force the JS decoder).
	setDecoderConfig( config ) {

		this.decoderConfig = config;

		return this;

	}

	// Sets the maximum number of decode workers.
	setWorkerLimit( workerLimit ) {

		this.workerLimit = workerLimit;

		return this;

	}

	// Fetches a .drc file as an ArrayBuffer and decodes it.
	load( url, onLoad, onProgress, onError ) {

		const loader = new FileLoader( this.manager );

		loader.setPath( this.path );
		loader.setResponseType( 'arraybuffer' );
		loader.setRequestHeader( this.requestHeader );
		loader.setWithCredentials( this.withCredentials );

		loader.load( url, ( buffer ) => {

			this.parse( buffer, onLoad, onError );

		}, onProgress, onError );

	}

	// Decodes an in-memory .drc buffer; vertex colors are assumed sRGB here.
	parse( buffer, onLoad, onError = ()=>{} ) {

		this.decodeDracoFile( buffer, onLoad, null, null, SRGBColorSpace, onError ).catch( onError );

	}

	/**
	 * Decodes a Draco buffer into a BufferGeometry.
	 *
	 * @param {ArrayBuffer} buffer - Compressed geometry data.
	 * @param {Function} callback - Receives the decoded BufferGeometry.
	 * @param {Object} [attributeIDs] - Custom attribute-name -> unique-ID map (glTF style).
	 * @param {Object} [attributeTypes] - Attribute-name -> typed-array-name map.
	 * @param {string} [vertexColorSpace] - Color space of encoded vertex colors.
	 * @param {Function} [onError] - Called on decode failure.
	 * @returns {Promise} Resolves after callback has been invoked.
	 */
	decodeDracoFile( buffer, callback, attributeIDs, attributeTypes, vertexColorSpace = LinearSRGBColorSpace, onError = () => {} ) {

		const taskConfig = {
			attributeIDs: attributeIDs || this.defaultAttributeIDs,
			attributeTypes: attributeTypes || this.defaultAttributeTypes,
			useUniqueIDs: !! attributeIDs,
			vertexColorSpace: vertexColorSpace,
		};

		return this.decodeGeometry( buffer, taskConfig ).then( callback ).catch( onError );

	}

	// Schedules a decode task on the worker pool, caching by buffer + config.
	decodeGeometry( buffer, taskConfig ) {

		const taskKey = JSON.stringify( taskConfig );

		// Check for an existing task using this buffer. A transferred buffer cannot be transferred
		// again from this thread.
		if ( _taskCache.has( buffer ) ) {

			const cachedTask = _taskCache.get( buffer );

			if ( cachedTask.key === taskKey ) {

				return cachedTask.promise;

			} else if ( buffer.byteLength === 0 ) {

				// Technically, it would be possible to wait for the previous task to complete,
				// transfer the buffer back, and decode again with the second configuration. That
				// is complex, and I don't know of any reason to decode a Draco buffer twice in
				// different ways, so this is left unimplemented.
				throw new Error(

					'THREE.DRACOLoader: Unable to re-decode a buffer with different ' +
					'settings. Buffer has already been transferred.'

				);

			}

		}

		//

		let worker;
		const taskID = this.workerNextTaskID ++;
		const taskCost = buffer.byteLength;

		// Obtain a worker and assign a task, and construct a geometry instance
		// when the task completes.
		const geometryPending = this._getWorker( taskID, taskCost )
			.then( ( _worker ) => {

				worker = _worker;

				return new Promise( ( resolve, reject ) => {

					worker._callbacks[ taskID ] = { resolve, reject };

					// The buffer is passed as a transferable — ownership moves to the worker.
					worker.postMessage( { type: 'decode', id: taskID, taskConfig, buffer }, [ buffer ] );

					// this.debug();

				} );

			} )
			.then( ( message ) => this._createGeometry( message.geometry ) );

		// Remove task from the task list.
		// Note: replaced '.finally()' with '.catch().then()' block - iOS 11 support (#19416)
		geometryPending
			.catch( () => true )
			.then( () => {

				if ( worker && taskID ) {

					this._releaseTask( worker, taskID );

					// this.debug();

				}

			} );

		// Cache the task result.
		_taskCache.set( buffer, {

			key: taskKey,
			promise: geometryPending

		} );

		return geometryPending;

	}

	// Converts the worker's plain geometry description into a BufferGeometry.
	_createGeometry( geometryData ) {

		const geometry = new BufferGeometry();

		if ( geometryData.index ) {

			geometry.setIndex( new BufferAttribute( geometryData.index.array, 1 ) );

		}

		for ( let i = 0; i < geometryData.attributes.length; i ++ ) {

			const result = geometryData.attributes[ i ];
			const name = result.name;
			const array = result.array;
			const itemSize = result.itemSize;

			const attribute = new BufferAttribute( array, itemSize );

			if ( name === 'color' ) {

				this._assignVertexColorSpace( attribute, result.vertexColorSpace );

				// Integer-typed color arrays are treated as normalized values.
				attribute.normalized = ( array instanceof Float32Array ) === false;

			}

			geometry.setAttribute( name, attribute );

		}

		return geometry;

	}

	// Converts sRGB-encoded vertex colors to Linear-sRGB in place.
	_assignVertexColorSpace( attribute, inputColorSpace ) {

		// While .drc files do not specify colorspace, the only 'official' tooling
		// is PLY and OBJ converters, which use sRGB. We'll assume sRGB when a .drc
		// file is passed into .load() or .parse(). GLTFLoader uses internal APIs
		// to decode geometry, and vertex colors are already Linear-sRGB in there.
		if ( inputColorSpace !== SRGBColorSpace ) return;

		const _color = new Color();

		for ( let i = 0, il = attribute.count; i < il; i ++ ) {

			_color.fromBufferAttribute( attribute, i ).convertSRGBToLinear();
			attribute.setXYZ( i, _color.r, _color.g, _color.b );

		}

	}

	// Fetches one decoder library file relative to decoderPath.
	_loadLibrary( url, responseType ) {

		const loader = new FileLoader( this.manager );
		loader.setPath( this.decoderPath );
		loader.setResponseType( responseType );
		loader.setWithCredentials( this.withCredentials );

		return new Promise( ( resolve, reject ) => {

			loader.load( url, resolve, undefined, reject );

		} );

	}

	// Begins fetching the decoder ahead of the first decode request.
	preload() {

		this._initDecoder();

		return this;

	}

	// Lazily loads the decoder (WASM when available, JS otherwise) and builds
	// the worker source blob. Resolves once the worker source URL is ready.
	_initDecoder() {

		if ( this.decoderPending ) return this.decoderPending;

		const useJS = typeof WebAssembly !== 'object' || this.decoderConfig.type === 'js';
		const librariesPending = [];

		if ( useJS ) {

			librariesPending.push( this._loadLibrary( 'draco_decoder.js', 'text' ) );

		} else {

			librariesPending.push( this._loadLibrary( 'draco_wasm_wrapper.js', 'text' ) );
			librariesPending.push( this._loadLibrary( 'draco_decoder.wasm', 'arraybuffer' ) );

		}

		this.decoderPending = Promise.all( librariesPending )
			.then( ( libraries ) => {

				const jsContent = libraries[ 0 ];

				if ( ! useJS ) {

					this.decoderConfig.wasmBinary = libraries[ 1 ];

				}

				// The worker script is the decoder library plus the body of the
				// DRACOWorker function, bundled into a single Blob URL.
				const fn = DRACOWorker.toString();

				const body = [
					'/* draco decoder */',
					jsContent,
					'',
					'/* worker */',
					fn.substring( fn.indexOf( '{' ) + 1, fn.lastIndexOf( '}' ) )
				].join( '\n' );

				this.workerSourceURL = URL.createObjectURL( new Blob( [ body ] ) );

			} );

		return this.decoderPending;

	}

	// Returns the least-loaded worker (creating one while under the limit) and
	// registers the new task's cost against it.
	_getWorker( taskID, taskCost ) {

		return this._initDecoder().then( () => {

			if ( this.workerPool.length < this.workerLimit ) {

				const worker = new Worker( this.workerSourceURL );

				worker._callbacks = {};
				worker._taskCosts = {};
				worker._taskLoad = 0;

				worker.postMessage( { type: 'init', decoderConfig: this.decoderConfig } );

				worker.onmessage = function ( e ) {

					const message = e.data;

					switch ( message.type ) {

						case 'decode':
							worker._callbacks[ message.id ].resolve( message );
							break;

						case 'error':
							worker._callbacks[ message.id ].reject( message );
							break;

						default:
							console.error( 'THREE.DRACOLoader: Unexpected message, "' + message.type + '"' );

					}

				};

				this.workerPool.push( worker );

			} else {

				// Pool is full: sort so the least-loaded worker ends up last.
				this.workerPool.sort( function ( a, b ) {

					return a._taskLoad > b._taskLoad ? - 1 : 1;

				} );

			}

			const worker = this.workerPool[ this.workerPool.length - 1 ];
			worker._taskCosts[ taskID ] = taskCost;
			worker._taskLoad += taskCost;
			return worker;

		} );

	}

	// Removes a completed task's bookkeeping from its worker.
	_releaseTask( worker, taskID ) {

		worker._taskLoad -= worker._taskCosts[ taskID ];
		delete worker._callbacks[ taskID ];
		delete worker._taskCosts[ taskID ];

	}

	// Logs the current per-worker load (bytes of pending work).
	debug() {

		console.log( 'Task load: ', this.workerPool.map( ( worker ) => worker._taskLoad ) );

	}

	// Terminates all workers and revokes the worker source Blob URL.
	dispose() {

		for ( let i = 0; i < this.workerPool.length; ++ i ) {

			this.workerPool[ i ].terminate();

		}

		this.workerPool.length = 0;

		if ( this.workerSourceURL !== '' ) {

			URL.revokeObjectURL( this.workerSourceURL );

		}

		return this;

	}

}
/* WEB WORKER */
/* WEB WORKER */

// Never called directly: its body is extracted via toString() in
// DRACOLoader._initDecoder() and becomes the Web Worker script. Inside the
// worker, the Draco decoder module (DracoDecoderModule) is defined by the
// decoder library prepended to this body.
function DRACOWorker() {

	let decoderConfig;
	let decoderPending;

	onmessage = function ( e ) {

		const message = e.data;

		switch ( message.type ) {

			case 'init':
				decoderConfig = message.decoderConfig;
				decoderPending = new Promise( function ( resolve/*, reject*/ ) {

					decoderConfig.onModuleLoaded = function ( draco ) {

						// Module is Promise-like. Wrap before resolving to avoid loop.
						resolve( { draco: draco } );

					};

					DracoDecoderModule( decoderConfig ); // eslint-disable-line no-undef

				} );
				break;

			case 'decode':
				const buffer = message.buffer;
				const taskConfig = message.taskConfig;
				decoderPending.then( ( module ) => {

					const draco = module.draco;
					const decoder = new draco.Decoder();

					try {

						const geometry = decodeGeometry( draco, decoder, new Int8Array( buffer ), taskConfig );

						// Transfer result buffers back to the main thread (zero-copy).
						const buffers = geometry.attributes.map( ( attr ) => attr.array.buffer );

						if ( geometry.index ) buffers.push( geometry.index.array.buffer );

						self.postMessage( { type: 'decode', id: message.id, geometry }, buffers );

					} catch ( error ) {

						console.error( error );

						self.postMessage( { type: 'error', id: message.id, error: error.message } );

					} finally {

						// Always release the native decoder object.
						draco.destroy( decoder );

					}

				} );
				break;

		}

	};

	// Decodes a mesh or point cloud and gathers the requested attributes
	// into plain { index, attributes } data suitable for postMessage.
	function decodeGeometry( draco, decoder, array, taskConfig ) {

		const attributeIDs = taskConfig.attributeIDs;
		const attributeTypes = taskConfig.attributeTypes;

		let dracoGeometry;
		let decodingStatus;

		const geometryType = decoder.GetEncodedGeometryType( array );

		if ( geometryType === draco.TRIANGULAR_MESH ) {

			dracoGeometry = new draco.Mesh();
			decodingStatus = decoder.DecodeArrayToMesh( array, array.byteLength, dracoGeometry );

		} else if ( geometryType === draco.POINT_CLOUD ) {

			dracoGeometry = new draco.PointCloud();
			decodingStatus = decoder.DecodeArrayToPointCloud( array, array.byteLength, dracoGeometry );

		} else {

			throw new Error( 'THREE.DRACOLoader: Unexpected geometry type.' );

		}

		if ( ! decodingStatus.ok() || dracoGeometry.ptr === 0 ) {

			throw new Error( 'THREE.DRACOLoader: Decoding failed: ' + decodingStatus.error_msg() );

		}

		const geometry = { index: null, attributes: [] };

		// Gather all vertex attributes.
		for ( const attributeName in attributeIDs ) {

			const attributeType = self[ attributeTypes[ attributeName ] ];

			let attribute;
			let attributeID;

			// A Draco file may be created with default vertex attributes, whose attribute IDs
			// are mapped 1:1 from their semantic name (POSITION, NORMAL, ...). Alternatively,
			// a Draco file may contain a custom set of attributes, identified by known unique
			// IDs. glTF files always do the latter, and `.drc` files typically do the former.
			if ( taskConfig.useUniqueIDs ) {

				attributeID = attributeIDs[ attributeName ];
				attribute = decoder.GetAttributeByUniqueId( dracoGeometry, attributeID );

			} else {

				attributeID = decoder.GetAttributeId( dracoGeometry, draco[ attributeIDs[ attributeName ] ] );

				if ( attributeID === - 1 ) continue;

				attribute = decoder.GetAttribute( dracoGeometry, attributeID );

			}

			const attributeResult = decodeAttribute( draco, decoder, dracoGeometry, attributeName, attributeType, attribute );

			if ( attributeName === 'color' ) {

				attributeResult.vertexColorSpace = taskConfig.vertexColorSpace;

			}

			geometry.attributes.push( attributeResult );

		}

		// Add index.
		if ( geometryType === draco.TRIANGULAR_MESH ) {

			geometry.index = decodeIndex( draco, decoder, dracoGeometry );

		}

		draco.destroy( dracoGeometry );

		return geometry;

	}

	// Copies the triangle index out of the WASM heap into a Uint32Array.
	function decodeIndex( draco, decoder, dracoGeometry ) {

		const numFaces = dracoGeometry.num_faces();
		const numIndices = numFaces * 3;
		const byteLength = numIndices * 4;

		const ptr = draco._malloc( byteLength );
		decoder.GetTrianglesUInt32Array( dracoGeometry, byteLength, ptr );
		// .slice() detaches the data from the WASM heap before freeing ptr.
		const index = new Uint32Array( draco.HEAPF32.buffer, ptr, numIndices ).slice();
		draco._free( ptr );

		return { array: index, itemSize: 1 };

	}

	// Copies one attribute's data for all points out of the WASM heap.
	function decodeAttribute( draco, decoder, dracoGeometry, attributeName, attributeType, attribute ) {

		const numComponents = attribute.num_components();
		const numPoints = dracoGeometry.num_points();
		const numValues = numPoints * numComponents;
		const byteLength = numValues * attributeType.BYTES_PER_ELEMENT;
		const dataType = getDracoDataType( draco, attributeType );

		const ptr = draco._malloc( byteLength );
		decoder.GetAttributeDataArrayForAllPoints( dracoGeometry, attribute, dataType, byteLength, ptr );
		// .slice() detaches the data from the WASM heap before freeing ptr.
		const array = new attributeType( draco.HEAPF32.buffer, ptr, numValues ).slice();
		draco._free( ptr );

		return {
			name: attributeName,
			array: array,
			itemSize: numComponents
		};

	}

	// Maps a JS typed-array constructor to the matching Draco data-type enum.
	function getDracoDataType( draco, attributeType ) {

		switch ( attributeType ) {

			case Float32Array: return draco.DT_FLOAT32;
			case Int8Array: return draco.DT_INT8;
			case Int16Array: return draco.DT_INT16;
			case Int32Array: return draco.DT_INT32;
			case Uint8Array: return draco.DT_UINT8;
			case Uint16Array: return draco.DT_UINT16;
			case Uint32Array: return draco.DT_UINT32;

		}

	}

}
export { DRACOLoader };

View File

@ -0,0 +1,171 @@
import {
Vector2
} from '/static/javascript/three/build/three.module.js';
/**
 * Separable blur that limits sample contributions by depth: samples whose
 * view-space depth differs from the center pixel by more than `depthCutoff`
 * are excluded, preventing the blur from bleeding across depth edges.
 */
// One directional tap of a separable, depth-aware blur. Sample offsets and
// weights are supplied as uniforms (see BlurShaderUtils.configure).
const DepthLimitedBlurShader = {

	name: 'DepthLimitedBlurShader',

	defines: {
		KERNEL_RADIUS: 4,       // taps on each side of the center sample
		DEPTH_PACKING: 1,       // 1: depth packed into RGBA, 0: plain depth
		PERSPECTIVE_CAMERA: 1   // 1: perspective depth, 0: orthographic
	},

	uniforms: {
		tDiffuse: { value: null },                          // input color
		size: { value: new Vector2( 512, 512 ) },           // render target size
		sampleUvOffsets: { value: [ new Vector2( 0, 0 ) ] },
		sampleWeights: { value: [ 1.0 ] },
		tDepth: { value: null },                            // depth texture
		cameraNear: { value: 10 },
		cameraFar: { value: 1000 },
		depthCutoff: { value: 10 },                         // max view-Z delta to blend
	},

	vertexShader: /* glsl */`
		#include <common>
		uniform vec2 size;
		varying vec2 vUv;
		varying vec2 vInvSize;
		void main() {
			vUv = uv;
			vInvSize = 1.0 / size;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
		}`,

	fragmentShader: /* glsl */`
		#include <common>
		#include <packing>
		uniform sampler2D tDiffuse;
		uniform sampler2D tDepth;
		uniform float cameraNear;
		uniform float cameraFar;
		uniform float depthCutoff;
		uniform vec2 sampleUvOffsets[ KERNEL_RADIUS + 1 ];
		uniform float sampleWeights[ KERNEL_RADIUS + 1 ];
		varying vec2 vUv;
		varying vec2 vInvSize;
		float getDepth( const in vec2 screenPosition ) {
			#if DEPTH_PACKING == 1
			return unpackRGBAToDepth( texture2D( tDepth, screenPosition ) );
			#else
			return texture2D( tDepth, screenPosition ).x;
			#endif
		}
		float getViewZ( const in float depth ) {
			#if PERSPECTIVE_CAMERA == 1
			return perspectiveDepthToViewZ( depth, cameraNear, cameraFar );
			#else
			return orthographicDepthToViewZ( depth, cameraNear, cameraFar );
			#endif
		}
		void main() {
			float depth = getDepth( vUv );
			if( depth >= ( 1.0 - EPSILON ) ) {
				discard;
			}
			float centerViewZ = -getViewZ( depth );
			bool rBreak = false, lBreak = false;
			float weightSum = sampleWeights[0];
			vec4 diffuseSum = texture2D( tDiffuse, vUv ) * weightSum;
			for( int i = 1; i <= KERNEL_RADIUS; i ++ ) {
				float sampleWeight = sampleWeights[i];
				vec2 sampleUvOffset = sampleUvOffsets[i] * vInvSize;
				vec2 sampleUv = vUv + sampleUvOffset;
				float viewZ = -getViewZ( getDepth( sampleUv ) );
				if( abs( viewZ - centerViewZ ) > depthCutoff ) rBreak = true;
				if( ! rBreak ) {
					diffuseSum += texture2D( tDiffuse, sampleUv ) * sampleWeight;
					weightSum += sampleWeight;
				}
				sampleUv = vUv - sampleUvOffset;
				viewZ = -getViewZ( getDepth( sampleUv ) );
				if( abs( viewZ - centerViewZ ) > depthCutoff ) lBreak = true;
				if( ! lBreak ) {
					diffuseSum += texture2D( tDiffuse, sampleUv ) * sampleWeight;
					weightSum += sampleWeight;
				}
			}
			gl_FragColor = diffuseSum / weightSum;
		}`

};
// Helper routines for configuring DepthLimitedBlurShader materials.
const BlurShaderUtils = {

	// Gaussian weights for taps 0..kernelRadius (center tap first).
	createSampleWeights: function ( kernelRadius, stdDev ) {

		return Array.from( { length: kernelRadius + 1 }, ( _, tap ) => gaussian( tap, stdDev ) );

	},

	// UV offsets for taps 0..kernelRadius, each a scaled copy of uvIncrement.
	createSampleOffsets: function ( kernelRadius, uvIncrement ) {

		return Array.from( { length: kernelRadius + 1 }, ( _, tap ) => uvIncrement.clone().multiplyScalar( tap ) );

	},

	// Installs the kernel radius, sample offsets and weights on a blur material.
	configure: function ( material, kernelRadius, stdDev, uvIncrement ) {

		material.defines[ 'KERNEL_RADIUS' ] = kernelRadius;
		material.uniforms[ 'sampleUvOffsets' ].value = BlurShaderUtils.createSampleOffsets( kernelRadius, uvIncrement );
		material.uniforms[ 'sampleWeights' ].value = BlurShaderUtils.createSampleWeights( kernelRadius, stdDev );
		material.needsUpdate = true;

	}

};
// Normalized Gaussian PDF evaluated at x for standard deviation stdDev.
function gaussian( x, stdDev ) {

	const norm = Math.sqrt( 2.0 * Math.PI ) * stdDev;
	return Math.exp( - ( x * x ) / ( 2.0 * ( stdDev * stdDev ) ) ) / norm;

}
export { DepthLimitedBlurShader, BlurShaderUtils };

View File

@ -0,0 +1,101 @@
/**
 * Digital Glitch Shader
 * Applies block-displacement glitch distortion plus an RGB channel shift
 * and noise. RGB-shift portion based on http://kriss.cx/tom/2009/05/rgb-shift/
 * by Tom Butterworth / http://kriss.cx/tom/
 *
 * amount: RGB shift distance (1 is width of input)
 * angle: RGB shift angle in radians
 */
const DigitalGlitch = {

	uniforms: {
		tDiffuse: { value: null },      // input color texture
		tDisp: { value: null },         // displacement texture driving the glitch squares
		byp: { value: 0 },              // 0: apply glitch, >=1: pass through unchanged
		amount: { value: 0.08 },        // RGB shift distance
		angle: { value: 0.02 },         // RGB shift angle (radians)
		seed: { value: 0.02 },          // randomization seed
		seed_x: { value: 0.02 },        // horizontal displacement seed, range -1..1
		seed_y: { value: 0.02 },        // vertical displacement seed, range -1..1
		distortion_x: { value: 0.5 },   // horizontal band position
		distortion_y: { value: 0.6 },   // vertical band position
		col_s: { value: 0.05 }          // band half-width
	},

	vertexShader: /* glsl */`
		varying vec2 vUv;
		void main() {
			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
		}`,

	fragmentShader: /* glsl */`
		uniform int byp; //should we apply the glitch ?
		uniform sampler2D tDiffuse;
		uniform sampler2D tDisp;
		uniform float amount;
		uniform float angle;
		uniform float seed;
		uniform float seed_x;
		uniform float seed_y;
		uniform float distortion_x;
		uniform float distortion_y;
		uniform float col_s;
		varying vec2 vUv;
		float rand(vec2 co){
			return fract(sin(dot(co.xy ,vec2(12.9898,78.233))) * 43758.5453);
		}
		void main() {
			if(byp<1) {
				vec2 p = vUv;
				float xs = floor(gl_FragCoord.x / 0.5);
				float ys = floor(gl_FragCoord.y / 0.5);
				//based on staffantans glitch shader for unity https://github.com/staffantan/unityglitch
				float disp = texture2D(tDisp, p*seed*seed).r;
				if(p.y<distortion_x+col_s && p.y>distortion_x-col_s*seed) {
					if(seed_x>0.){
						p.y = 1. - (p.y + distortion_y);
					}
					else {
						p.y = distortion_y;
					}
				}
				if(p.x<distortion_y+col_s && p.x>distortion_y-col_s*seed) {
					if(seed_y>0.){
						p.x=distortion_x;
					}
					else {
						p.x = 1. - (p.x + distortion_x);
					}
				}
				p.x+=disp*seed_x*(seed/5.);
				p.y+=disp*seed_y*(seed/5.);
				//base from RGB shift shader
				vec2 offset = amount * vec2( cos(angle), sin(angle));
				vec4 cr = texture2D(tDiffuse, p + offset);
				vec4 cga = texture2D(tDiffuse, p);
				vec4 cb = texture2D(tDiffuse, p - offset);
				gl_FragColor = vec4(cr.r, cga.g, cb.b, cga.a);
				//add noise
				vec4 snow = 200.*amount*vec4(rand(vec2(xs * seed,ys * seed*50.))*0.2);
				gl_FragColor = gl_FragColor+ snow;
			}
			else {
				gl_FragColor=texture2D (tDiffuse, vUv);
			}
		}`

};

View File

@ -0,0 +1,65 @@
import {
ShaderMaterial,
UniformsUtils
} from '/static/javascript/three/build/three.module.js';
import { Pass, FullScreenQuad } from'/static/javascript/three/examples/jsm/postprocessing/Pass.js';
import { DotScreenShader } from'/static/javascript/three/examples/jsm/shaders/DotScreenShader.js';
/**
 * Post-processing pass that applies the dot-screen (halftone) effect.
 */
class DotScreenPass extends Pass {

	/**
	 * @param {Vector2} [center] - Pattern center in UV space.
	 * @param {number} [angle] - Pattern rotation in radians.
	 * @param {number} [scale] - Dot pattern scale.
	 */
	constructor( center, angle, scale ) {

		super();

		const shader = DotScreenShader;

		this.uniforms = UniformsUtils.clone( shader.uniforms );

		// Only override defaults for options the caller actually supplied.
		if ( center !== undefined ) this.uniforms[ 'center' ].value.copy( center );
		if ( angle !== undefined ) this.uniforms[ 'angle' ].value = angle;
		if ( scale !== undefined ) this.uniforms[ 'scale' ].value = scale;

		this.material = new ShaderMaterial( {
			name: shader.name,
			uniforms: this.uniforms,
			vertexShader: shader.vertexShader,
			fragmentShader: shader.fragmentShader
		} );

		this.fsQuad = new FullScreenQuad( this.material );

	}

	render( renderer, writeBuffer, readBuffer /*, deltaTime, maskActive */ ) {

		this.uniforms[ 'tDiffuse' ].value = readBuffer.texture;
		this.uniforms[ 'tSize' ].value.set( readBuffer.width, readBuffer.height );

		// Screen output is never cleared here; buffer output clears on request.
		renderer.setRenderTarget( this.renderToScreen ? null : writeBuffer );
		if ( ! this.renderToScreen && this.clear ) renderer.clear();
		this.fsQuad.render( renderer );

	}

	dispose() {

		this.material.dispose();
		this.fsQuad.dispose();

	}

}
export { DotScreenPass };

View File

@ -0,0 +1,70 @@
import {
Vector2
} from '/static/javascript/three/build/three.module.js';
/**
* Dot screen shader
* based on glfx.js sepia shader
* https://github.com/evanw/glfx.js
*/
// Dot-screen (halftone) shader, based on the glfx.js sepia shader:
// https://github.com/evanw/glfx.js
const DotScreenShader = {

	name: 'DotScreenShader',

	uniforms: {
		tDiffuse: { value: null },                    // input color texture
		tSize: { value: new Vector2( 256, 256 ) },    // input size in pixels
		center: { value: new Vector2( 0.5, 0.5 ) },   // pattern center in UV space
		angle: { value: 1.57 },                       // pattern rotation (radians)
		scale: { value: 1.0 }                         // dot pattern scale
	},

	vertexShader: /* glsl */`
		varying vec2 vUv;
		void main() {
			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
		}`,

	fragmentShader: /* glsl */`
		uniform vec2 center;
		uniform float angle;
		uniform float scale;
		uniform vec2 tSize;
		uniform sampler2D tDiffuse;
		varying vec2 vUv;
		float pattern() {
			float s = sin( angle ), c = cos( angle );
			vec2 tex = vUv * tSize - center;
			vec2 point = vec2( c * tex.x - s * tex.y, s * tex.x + c * tex.y ) * scale;
			return ( sin( point.x ) * sin( point.y ) ) * 4.0;
		}
		void main() {
			vec4 color = texture2D( tDiffuse, vUv );
			float average = ( color.r + color.g + color.b ) / 3.0;
			gl_FragColor = vec4( vec3( average * 10.0 - 5.0 + pattern() ), color.a );
		}`

};

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,44 @@
/**
* Exposure shader
*/
// Simple exposure adjustment: multiplies the input color by a scalar.
const ExposureShader = {

	name: 'ExposureShader',

	uniforms: {
		tDiffuse: { value: null },  // input color texture
		exposure: { value: 1.0 }    // linear exposure multiplier
	},

	vertexShader: /* glsl */`
		varying vec2 vUv;
		void main() {
			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
		}`,

	fragmentShader: /* glsl */`
		uniform float exposure;
		uniform sampler2D tDiffuse;
		varying vec2 vUv;
		void main() {
			gl_FragColor = texture2D( tDiffuse, vUv );
			gl_FragColor.rgb *= exposure;
		}`

};

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,286 @@
import {
Vector2
} from '/static/javascript/three/build/three.module.js';
/**
* NVIDIA FXAA by Timothy Lottes
* https://developer.download.nvidia.com/assets/gamedev/files/sdk/11/FXAA_WhitePaper.pdf
* - WebGL port by @supereggbert
* http://www.glge.org/demos/fxaa/
* Further improved by Daniel Sturk
*/
/**
 * FXAAShader — Fast Approximate Anti-Aliasing (FXAA 3.11) post-processing
 * shader definition ({ name, uniforms, vertexShader, fragmentShader }),
 * based on NVIDIA's implementation, WebGL port by Agost Biro.
 *
 * The fragment shader assumes premultiplied-alpha input: edge detection
 * uses the maximum per-channel (RGBA) contrast between a pixel and its
 * 4-neighbourhood, then blends along the detected edge direction.
 */
const FXAAShader = {
name: 'FXAAShader',
// tDiffuse: frame to filter; resolution: texel size, i.e. (1/width, 1/height).
uniforms: {
'tDiffuse': { value: null },
'resolution': { value: new Vector2( 1 / 1024, 1 / 512 ) }
},
// Pass-through vertex shader: forwards UVs, standard MVP transform.
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
// Fragment shader: contrast-based edge search plus directional blend.
fragmentShader: /* glsl */`
precision highp float;
uniform sampler2D tDiffuse;
uniform vec2 resolution;
varying vec2 vUv;
// FXAA 3.11 implementation by NVIDIA, ported to WebGL by Agost Biro (biro@archilogic.com)
//----------------------------------------------------------------------------------
// File: es3-kepler\FXAA\assets\shaders/FXAA_DefaultES.frag
// SDK Version: v3.00
// Email: gameworks@nvidia.com
// Site: http://developer.nvidia.com/
//
// Copyright (c) 2014-2015, NVIDIA CORPORATION. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of NVIDIA CORPORATION nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ''AS IS'' AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//----------------------------------------------------------------------------------
#ifndef FXAA_DISCARD
//
// Only valid for PC OpenGL currently.
// Probably will not work when FXAA_GREEN_AS_LUMA = 1.
//
// 1 = Use discard on pixels which don't need AA.
// For APIs which enable concurrent TEX+ROP from same surface.
// 0 = Return unchanged color on pixels which don't need AA.
//
#define FXAA_DISCARD 0
#endif
/*--------------------------------------------------------------------------*/
#define FxaaTexTop(t, p) texture2D(t, p, -100.0)
#define FxaaTexOff(t, p, o, r) texture2D(t, p + (o * r), -100.0)
/*--------------------------------------------------------------------------*/
#define NUM_SAMPLES 5
// assumes colors have premultipliedAlpha, so that the calculated color contrast is scaled by alpha
float contrast( vec4 a, vec4 b ) {
vec4 diff = abs( a - b );
return max( max( max( diff.r, diff.g ), diff.b ), diff.a );
}
/*============================================================================
FXAA3 QUALITY - PC
============================================================================*/
/*--------------------------------------------------------------------------*/
vec4 FxaaPixelShader(
vec2 posM,
sampler2D tex,
vec2 fxaaQualityRcpFrame,
float fxaaQualityEdgeThreshold,
float fxaaQualityinvEdgeThreshold
) {
vec4 rgbaM = FxaaTexTop(tex, posM);
vec4 rgbaS = FxaaTexOff(tex, posM, vec2( 0.0, 1.0), fxaaQualityRcpFrame.xy);
vec4 rgbaE = FxaaTexOff(tex, posM, vec2( 1.0, 0.0), fxaaQualityRcpFrame.xy);
vec4 rgbaN = FxaaTexOff(tex, posM, vec2( 0.0,-1.0), fxaaQualityRcpFrame.xy);
vec4 rgbaW = FxaaTexOff(tex, posM, vec2(-1.0, 0.0), fxaaQualityRcpFrame.xy);
// . S .
// W M E
// . N .
bool earlyExit = max( max( max(
contrast( rgbaM, rgbaN ),
contrast( rgbaM, rgbaS ) ),
contrast( rgbaM, rgbaE ) ),
contrast( rgbaM, rgbaW ) )
< fxaaQualityEdgeThreshold;
// . 0 .
// 0 0 0
// . 0 .
#if (FXAA_DISCARD == 1)
if(earlyExit) FxaaDiscard;
#else
if(earlyExit) return rgbaM;
#endif
float contrastN = contrast( rgbaM, rgbaN );
float contrastS = contrast( rgbaM, rgbaS );
float contrastE = contrast( rgbaM, rgbaE );
float contrastW = contrast( rgbaM, rgbaW );
float relativeVContrast = ( contrastN + contrastS ) - ( contrastE + contrastW );
relativeVContrast *= fxaaQualityinvEdgeThreshold;
bool horzSpan = relativeVContrast > 0.;
// . 1 .
// 0 0 0
// . 1 .
// 45 deg edge detection and corners of objects, aka V/H contrast is too similar
if( abs( relativeVContrast ) < .3 ) {
// locate the edge
vec2 dirToEdge;
dirToEdge.x = contrastE > contrastW ? 1. : -1.;
dirToEdge.y = contrastS > contrastN ? 1. : -1.;
// . 2 . . 1 .
// 1 0 2 ~= 0 0 1
// . 1 . . 0 .
// tap 2 pixels and see which ones are "outside" the edge, to
// determine if the edge is vertical or horizontal
vec4 rgbaAlongH = FxaaTexOff(tex, posM, vec2( dirToEdge.x, -dirToEdge.y ), fxaaQualityRcpFrame.xy);
float matchAlongH = contrast( rgbaM, rgbaAlongH );
// . 1 .
// 0 0 1
// . 0 H
vec4 rgbaAlongV = FxaaTexOff(tex, posM, vec2( -dirToEdge.x, dirToEdge.y ), fxaaQualityRcpFrame.xy);
float matchAlongV = contrast( rgbaM, rgbaAlongV );
// V 1 .
// 0 0 1
// . 0 .
relativeVContrast = matchAlongV - matchAlongH;
relativeVContrast *= fxaaQualityinvEdgeThreshold;
if( abs( relativeVContrast ) < .3 ) { // 45 deg edge
// 1 1 .
// 0 0 1
// . 0 1
// do a simple blur
return mix(
rgbaM,
(rgbaN + rgbaS + rgbaE + rgbaW) * .25,
.4
);
}
horzSpan = relativeVContrast > 0.;
}
if(!horzSpan) rgbaN = rgbaW;
if(!horzSpan) rgbaS = rgbaE;
// . 0 . 1
// 1 0 1 -> 0
// . 0 . 1
bool pairN = contrast( rgbaM, rgbaN ) > contrast( rgbaM, rgbaS );
if(!pairN) rgbaN = rgbaS;
vec2 offNP;
offNP.x = (!horzSpan) ? 0.0 : fxaaQualityRcpFrame.x;
offNP.y = ( horzSpan) ? 0.0 : fxaaQualityRcpFrame.y;
bool doneN = false;
bool doneP = false;
float nDist = 0.;
float pDist = 0.;
vec2 posN = posM;
vec2 posP = posM;
int iterationsUsedN = 0;
int iterationsUsedP = 0;
for( int i = 0; i < NUM_SAMPLES; i++ ) {
float increment = float(i + 1);
if(!doneN) {
nDist += increment;
posN = posM + offNP * nDist;
vec4 rgbaEndN = FxaaTexTop(tex, posN.xy);
doneN = contrast( rgbaEndN, rgbaM ) > contrast( rgbaEndN, rgbaN );
iterationsUsedN = i;
}
if(!doneP) {
pDist += increment;
posP = posM - offNP * pDist;
vec4 rgbaEndP = FxaaTexTop(tex, posP.xy);
doneP = contrast( rgbaEndP, rgbaM ) > contrast( rgbaEndP, rgbaN );
iterationsUsedP = i;
}
if(doneN || doneP) break;
}
if ( !doneP && !doneN ) return rgbaM; // failed to find end of edge
float dist = min(
doneN ? float( iterationsUsedN ) / float( NUM_SAMPLES - 1 ) : 1.,
doneP ? float( iterationsUsedP ) / float( NUM_SAMPLES - 1 ) : 1.
);
// hacky way of reduces blurriness of mostly diagonal edges
// but reduces AA quality
dist = pow(dist, .5);
dist = 1. - dist;
return mix(
rgbaM,
rgbaN,
dist * .5
);
}
void main() {
const float edgeDetectionQuality = .2;
const float invEdgeDetectionQuality = 1. / edgeDetectionQuality;
gl_FragColor = FxaaPixelShader(
vUv,
tDiffuse,
resolution,
edgeDetectionQuality, // [0,1] contrast needed, otherwise early discard
invEdgeDetectionQuality
);
}
`
};
export { FXAAShader };

View File

@ -0,0 +1,64 @@
import {
ShaderMaterial,
UniformsUtils
} from '/static/javascript/three/build/three.module.js';
import { Pass, FullScreenQuad } from './Pass.js';
import { FilmShader } from '../shaders/FilmShader.js';
/**
 * FilmPass — post-processing pass that applies the FilmShader
 * (animated noise, optional grayscale) to the composer chain.
 */
class FilmPass extends Pass {

	/**
	 * @param {number} [intensity=0.5] - Effect strength (0 = no effect, 1 = full effect).
	 * @param {boolean} [grayscale=false] - Convert the output to grayscale.
	 */
	constructor( intensity = 0.5, grayscale = false ) {

		super();

		// Clone the shared shader uniforms so this pass owns its own state.
		this.uniforms = UniformsUtils.clone( FilmShader.uniforms );

		this.material = new ShaderMaterial( {
			name: FilmShader.name,
			uniforms: this.uniforms,
			vertexShader: FilmShader.vertexShader,
			fragmentShader: FilmShader.fragmentShader
		} );

		this.uniforms.intensity.value = intensity;
		this.uniforms.grayscale.value = grayscale;

		this.fsQuad = new FullScreenQuad( this.material );

	}

	/**
	 * Renders the film effect over the read buffer.
	 * @param {WebGLRenderer} renderer
	 * @param {WebGLRenderTarget} writeBuffer - Target when not rendering to screen.
	 * @param {WebGLRenderTarget} readBuffer - Source frame.
	 * @param {number} deltaTime - Seconds since last frame; advances the noise animation.
	 */
	render( renderer, writeBuffer, readBuffer, deltaTime /*, maskActive */ ) {

		this.uniforms[ 'tDiffuse' ].value = readBuffer.texture;
		this.uniforms[ 'time' ].value += deltaTime;

		if ( this.renderToScreen ) {

			renderer.setRenderTarget( null );

		} else {

			renderer.setRenderTarget( writeBuffer );
			if ( this.clear ) renderer.clear();

		}

		this.fsQuad.render( renderer );

	}

	// Frees the GPU resources owned by this pass.
	dispose() {

		this.fsQuad.dispose();
		this.material.dispose();

	}

}

export { FilmPass };

View File

@ -0,0 +1,59 @@
/**
 * FilmShader — animated film-grain overlay.
 * Adds time-varying random noise to the input frame and can optionally
 * convert the result to grayscale. Used by FilmPass.
 */
const FilmShader = {
name: 'FilmShader',
// tDiffuse: input frame; time: drives the noise animation;
// intensity: blend amount (0 = no effect, 1 = full); grayscale: luminance output.
uniforms: {
'tDiffuse': { value: null },
'time': { value: 0.0 },
'intensity': { value: 0.5 },
'grayscale': { value: false }
},
// Pass-through vertex shader.
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
// Fragment shader: rand()/luminance() come from the three.js <common> chunk.
fragmentShader: /* glsl */`
#include <common>
uniform float intensity;
uniform bool grayscale;
uniform float time;
uniform sampler2D tDiffuse;
varying vec2 vUv;
void main() {
vec4 base = texture2D( tDiffuse, vUv );
float noise = rand( fract( vUv + time ) );
vec3 color = base.rgb + base.rgb * clamp( 0.1 + noise, 0.0, 1.0 );
color = mix( base.rgb, color, intensity );
if ( grayscale ) {
color = vec3( luminance( color ) ); // assuming linear-srgb
}
gl_FragColor = vec4( color, base.a );
}`,
};
export { FilmShader };

View File

@ -0,0 +1,89 @@
/**
 * FocusShader — radial focus/blur effect.
 * Based on the PaintEffect postprocess from ro.me
 * http://code.google.com/p/3-dreams-of-black/source/browse/deploy/js/effects/PaintEffect.js
 *
 * Samples the input at seven rotated offsets whose radius grows with the
 * distance from the screen centre, keeping the centre sharp and blurring
 * the periphery.
 */
const FocusShader = {
name: 'FocusShader',
// screenWidth/screenHeight: input size in pixels;
// sampleDistance and waveFactor scale the blur radius falloff.
uniforms: {
'tDiffuse': { value: null },
'screenWidth': { value: 1024 },
'screenHeight': { value: 1024 },
'sampleDistance': { value: 0.94 },
'waveFactor': { value: 0.00125 }
},
// Pass-through vertex shader.
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
// Fragment shader: 7-tap rotated sampling, radius grows with distance from centre.
fragmentShader: /* glsl */`
uniform float screenWidth;
uniform float screenHeight;
uniform float sampleDistance;
uniform float waveFactor;
uniform sampler2D tDiffuse;
varying vec2 vUv;
void main() {
vec4 color, org, tmp, add;
float sample_dist, f;
vec2 vin;
vec2 uv = vUv;
add = color = org = texture2D( tDiffuse, uv );
vin = ( uv - vec2( 0.5 ) ) * vec2( 1.4 );
sample_dist = dot( vin, vin ) * 2.0;
f = ( waveFactor * 100.0 + sample_dist ) * sampleDistance * 4.0;
vec2 sampleSize = vec2( 1.0 / screenWidth, 1.0 / screenHeight ) * vec2( f );
add += tmp = texture2D( tDiffuse, uv + vec2( 0.111964, 0.993712 ) * sampleSize );
if( tmp.b < color.b ) color = tmp;
add += tmp = texture2D( tDiffuse, uv + vec2( 0.846724, 0.532032 ) * sampleSize );
if( tmp.b < color.b ) color = tmp;
add += tmp = texture2D( tDiffuse, uv + vec2( 0.943883, -0.330279 ) * sampleSize );
if( tmp.b < color.b ) color = tmp;
add += tmp = texture2D( tDiffuse, uv + vec2( 0.330279, -0.943883 ) * sampleSize );
if( tmp.b < color.b ) color = tmp;
add += tmp = texture2D( tDiffuse, uv + vec2( -0.532032, -0.846724 ) * sampleSize );
if( tmp.b < color.b ) color = tmp;
add += tmp = texture2D( tDiffuse, uv + vec2( -0.993712, -0.111964 ) * sampleSize );
if( tmp.b < color.b ) color = tmp;
add += tmp = texture2D( tDiffuse, uv + vec2( -0.707107, 0.707107 ) * sampleSize );
if( tmp.b < color.b ) color = tmp;
color = color * vec4( 2.0 ) - ( add / vec4( 8.0 ) );
color = color + ( add / vec4( 8.0 ) - color ) * ( vec4( 1.0 ) - vec4( sample_dist * 0.5 ) );
gl_FragColor = vec4( color.rgb * color.rgb * vec3( 0.95 ) + color.rgb, 1.0 );
}`
};
export { FocusShader };

View File

@ -0,0 +1,183 @@
import {
FileLoader,
Loader,
ShapePath
} from '/static/javascript/three/build/three.module.js';
/**
 * FontLoader — loads a typeface.json font file and parses it into a Font.
 */
class FontLoader extends Loader {

	constructor( manager ) {

		super( manager );

	}

	/**
	 * Loads and parses a font file.
	 * @param {string} url - Location of the typeface.json file.
	 * @param {Function} [onLoad] - Receives the parsed Font.
	 * @param {Function} [onProgress] - Progress callback.
	 * @param {Function} [onError] - Error callback.
	 */
	load( url, onLoad, onProgress, onError ) {

		const loader = new FileLoader( this.manager );
		loader.setPath( this.path );
		loader.setRequestHeader( this.requestHeader );
		loader.setWithCredentials( this.withCredentials );

		// Arrow function keeps `this` bound to the FontLoader instance.
		loader.load( url, ( text ) => {

			const font = this.parse( JSON.parse( text ) );

			if ( onLoad ) onLoad( font );

		}, onProgress, onError );

	}

	/**
	 * Wraps already-parsed typeface.json data in a Font.
	 * @param {Object} json - Parsed typeface.json data.
	 * @returns {Font}
	 */
	parse( json ) {

		return new Font( json );

	}

}
//
/**
 * Font — holds typeface.json glyph data and converts text into shapes.
 */
class Font {

	/**
	 * @param {Object} data - Parsed typeface.json font data.
	 */
	constructor( data ) {

		this.isFont = true;
		this.type = 'Font';
		this.data = data;

	}

	/**
	 * Converts a string into an array of Shapes for extrusion/meshing.
	 * @param {string} text - Text to convert ('\n' starts a new line).
	 * @param {number} [size=100] - Target glyph size.
	 * @returns {Array<Shape>} Flattened shapes of every rendered character.
	 */
	generateShapes( text, size = 100 ) {

		const shapes = [];

		for ( const path of createPaths( text, size, this.data ) ) {

			shapes.push( ...path.toShapes() );

		}

		return shapes;

	}

}
/**
 * Lays out a text string as an array of ShapePath objects using the glyph
 * outlines in the font data.
 *
 * @param {string} text - Text to lay out ('\n' starts a new line).
 * @param {number} size - Target size; glyphs scale by size / data.resolution.
 * @param {Object} data - Parsed typeface.json font data.
 * @returns {Array<ShapePath>} One path per rendered character.
 */
function createPaths( text, size, data ) {

	// Array.from() iterates by code point, keeping surrogate pairs intact.
	const chars = Array.from( text );
	const scale = size / data.resolution;
	const line_height = ( data.boundingBox.yMax - data.boundingBox.yMin + data.underlineThickness ) * scale;

	const paths = [];

	let offsetX = 0, offsetY = 0;

	for ( let i = 0; i < chars.length; i ++ ) {

		const char = chars[ i ];

		if ( char === '\n' ) {

			// Line break: carriage return, advance one line downwards.
			offsetX = 0;
			offsetY -= line_height;

		} else {

			const ret = createPath( char, scale, offsetX, offsetY, data );

			// createPath() returns undefined when neither the glyph nor the
			// '?' fallback exists in the font — skip instead of throwing.
			if ( ret !== undefined ) {

				offsetX += ret.offsetX;
				paths.push( ret.path );

			}

		}

	}

	return paths;

}
/**
 * Builds a ShapePath for a single character from its typeface.json outline.
 *
 * The outline is a space-separated command stream: 'm' moveTo, 'l' lineTo,
 * 'q' quadraticCurveTo, 'b' bezierCurveTo, each followed by its coordinates.
 *
 * @param {string} char - Character to render; falls back to the '?' glyph.
 * @param {number} scale - Units-to-target-size scale factor.
 * @param {number} offsetX - Horizontal pen position.
 * @param {number} offsetY - Vertical pen position.
 * @param {Object} data - Parsed typeface.json font data.
 * @returns {{offsetX: number, path: ShapePath}|undefined} Advance width and
 * path, or undefined when the glyph (and the '?' fallback) are missing.
 */
function createPath( char, scale, offsetX, offsetY, data ) {
const glyph = data.glyphs[ char ] || data.glyphs[ '?' ];
if ( ! glyph ) {
console.error( 'THREE.Font: character "' + char + '" does not exists in font family ' + data.familyName + '.' );
return;
}
const path = new ShapePath();
let x, y, cpx, cpy, cpx1, cpy1, cpx2, cpy2;
if ( glyph.o ) {
// Tokenized outline is memoized on the glyph so repeated characters
// only pay the split() cost once.
const outline = glyph._cachedOutline || ( glyph._cachedOutline = glyph.o.split( ' ' ) );
for ( let i = 0, l = outline.length; i < l; ) {
const action = outline[ i ++ ];
switch ( action ) {
case 'm': // moveTo
x = outline[ i ++ ] * scale + offsetX;
y = outline[ i ++ ] * scale + offsetY;
path.moveTo( x, y );
break;
case 'l': // lineTo
x = outline[ i ++ ] * scale + offsetX;
y = outline[ i ++ ] * scale + offsetY;
path.lineTo( x, y );
break;
case 'q': // quadraticCurveTo: end point first, then control point
cpx = outline[ i ++ ] * scale + offsetX;
cpy = outline[ i ++ ] * scale + offsetY;
cpx1 = outline[ i ++ ] * scale + offsetX;
cpy1 = outline[ i ++ ] * scale + offsetY;
path.quadraticCurveTo( cpx1, cpy1, cpx, cpy );
break;
case 'b': // bezierCurveTo: end point first, then the two control points
cpx = outline[ i ++ ] * scale + offsetX;
cpy = outline[ i ++ ] * scale + offsetY;
cpx1 = outline[ i ++ ] * scale + offsetX;
cpy1 = outline[ i ++ ] * scale + offsetY;
cpx2 = outline[ i ++ ] * scale + offsetX;
cpy2 = outline[ i ++ ] * scale + offsetY;
path.bezierCurveTo( cpx1, cpy1, cpx2, cpy2, cpx, cpy );
break;
}
}
}
// glyph.ha is the horizontal advance in font units.
return { offsetX: glyph.ha * scale, path: path };
}
export { FontLoader, Font };

View File

@ -0,0 +1,96 @@
import {
Vector2
} from '/static/javascript/three/build/three.module.js';
/**
 * Edge Detection Shader using the Frei-Chen filter
 * Based on http://rastergrid.com/blog/2011/01/frei-chen-edge-detector
 *
 * aspect: vec2 of (width, height) in pixels — the shader derives the texel
 * size as (1/aspect.x, 1/aspect.y).
 */
const FreiChenShader = {
name: 'FreiChenShader',
uniforms: {
'tDiffuse': { value: null },
'aspect': { value: new Vector2( 512, 512 ) }
},
// Pass-through vertex shader.
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
// Fragment shader: convolves a 3x3 intensity neighbourhood with the nine
// Frei-Chen basis masks and outputs the edge-energy ratio sqrt(M/S).
fragmentShader: /* glsl */`
uniform sampler2D tDiffuse;
varying vec2 vUv;
uniform vec2 aspect;
vec2 texel = vec2( 1.0 / aspect.x, 1.0 / aspect.y );
mat3 G[9];
// hard coded matrix values!!!! as suggested in https://github.com/neilmendoza/ofxPostProcessing/blob/master/src/EdgePass.cpp#L45
const mat3 g0 = mat3( 0.3535533845424652, 0, -0.3535533845424652, 0.5, 0, -0.5, 0.3535533845424652, 0, -0.3535533845424652 );
const mat3 g1 = mat3( 0.3535533845424652, 0.5, 0.3535533845424652, 0, 0, 0, -0.3535533845424652, -0.5, -0.3535533845424652 );
const mat3 g2 = mat3( 0, 0.3535533845424652, -0.5, -0.3535533845424652, 0, 0.3535533845424652, 0.5, -0.3535533845424652, 0 );
const mat3 g3 = mat3( 0.5, -0.3535533845424652, 0, -0.3535533845424652, 0, 0.3535533845424652, 0, 0.3535533845424652, -0.5 );
const mat3 g4 = mat3( 0, -0.5, 0, 0.5, 0, 0.5, 0, -0.5, 0 );
const mat3 g5 = mat3( -0.5, 0, 0.5, 0, 0, 0, 0.5, 0, -0.5 );
const mat3 g6 = mat3( 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.6666666865348816, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204 );
const mat3 g7 = mat3( -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, 0.6666666865348816, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408 );
const mat3 g8 = mat3( 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408 );
void main(void)
{
G[0] = g0,
G[1] = g1,
G[2] = g2,
G[3] = g3,
G[4] = g4,
G[5] = g5,
G[6] = g6,
G[7] = g7,
G[8] = g8;
mat3 I;
float cnv[9];
vec3 sample;
/* fetch the 3x3 neighbourhood and use the RGB vector's length as intensity value */
for (float i=0.0; i<3.0; i++) {
for (float j=0.0; j<3.0; j++) {
sample = texture2D(tDiffuse, vUv + texel * vec2(i-1.0,j-1.0) ).rgb;
I[int(i)][int(j)] = length(sample);
}
}
/* calculate the convolution values for all the masks */
for (int i=0; i<9; i++) {
float dp3 = dot(G[i][0], I[0]) + dot(G[i][1], I[1]) + dot(G[i][2], I[2]);
cnv[i] = dp3 * dp3;
}
float M = (cnv[0] + cnv[1]) + (cnv[2] + cnv[3]);
float S = (cnv[4] + cnv[5]) + (cnv[6] + cnv[7]) + (cnv[8] + M);
gl_FragColor = vec4(vec3(sqrt(M/S)), 1.0);
}`
};
export { FreiChenShader };

View File

@ -0,0 +1,261 @@
import {
BufferGeometry,
FileLoader,
Float32BufferAttribute,
Group,
LineBasicMaterial,
LineSegments,
Loader
} from '/static/javascript/three/build/three.module.js';
/**
 * GCodeLoader is used to load gcode files usually used for 3D printing or CNC applications.
 *
 * Gcode files are composed of commands used by machines to create objects.
 *
 * @class GCodeLoader
 * @param {LoadingManager} manager Loading manager.
 */
class GCodeLoader extends Loader {
constructor( manager ) {
super( manager );
// When true, parse() emits one LineSegments pair per printed layer;
// otherwise all layers are merged into two LineSegments (path/extruded).
this.splitLayer = false;
}
/**
 * Loads a gcode file and parses it into a Group of line segments.
 * @param {string} url - File location.
 * @param {Function} [onLoad] - Receives the parsed Group.
 * @param {Function} [onProgress] - Progress callback.
 * @param {Function} [onError] - Error callback; falls back to console.error.
 */
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
 * Parses gcode text into a Group named 'gcode'. Travel moves use a red
 * 'path' material, extrusion moves a green 'extruded' material. The result
 * is rotated -90° about X (printer Z-up to three.js Y-up).
 * @param {string} data - Raw gcode text.
 * @returns {Group}
 */
parse( data ) {
// Machine state: position/extrusion/feedrate plus positioning mode.
let state = { x: 0, y: 0, z: 0, e: 0, f: 0, extruding: false, relative: false };
const layers = [];
let currentLayer = undefined;
const pathMaterial = new LineBasicMaterial( { color: 0xFF0000 } );
pathMaterial.name = 'path';
const extrudingMaterial = new LineBasicMaterial( { color: 0x00FF00 } );
extrudingMaterial.name = 'extruded';
// Starts a new layer record at the Z of the given move.
function newLayer( line ) {
currentLayer = { vertex: [], pathVertex: [], z: line.z };
layers.push( currentLayer );
}
// Creates a line segment between p1 and p2, bucketed by extrusion state.
function addSegment( p1, p2 ) {
if ( currentLayer === undefined ) {
newLayer( p1 );
}
if ( state.extruding ) {
currentLayer.vertex.push( p1.x, p1.y, p1.z );
currentLayer.vertex.push( p2.x, p2.y, p2.z );
} else {
currentLayer.pathVertex.push( p1.x, p1.y, p1.z );
currentLayer.pathVertex.push( p2.x, p2.y, p2.z );
}
}
// Delta between two coordinates, honoring relative positioning mode.
function delta( v1, v2 ) {
return state.relative ? v2 : v2 - v1;
}
// Absolute coordinate from a gcode argument, honoring positioning mode.
function absolute( v1, v2 ) {
return state.relative ? v1 + v2 : v2;
}
// Strip ';' comments, then process line by line.
const lines = data.replace( /;.+/g, '' ).split( '\n' );
for ( let i = 0; i < lines.length; i ++ ) {
const tokens = lines[ i ].split( ' ' );
const cmd = tokens[ 0 ].toUpperCase();
// Arguments: each token is a single-letter key followed by a number,
// e.g. "X12.5" -> { x: 12.5 }.
const args = {};
tokens.splice( 1 ).forEach( function ( token ) {
if ( token[ 0 ] !== undefined ) {
const key = token[ 0 ].toLowerCase();
const value = parseFloat( token.substring( 1 ) );
args[ key ] = value;
}
} );
//Process commands
//G0/G1 Linear Movement
if ( cmd === 'G0' || cmd === 'G1' ) {
const line = {
x: args.x !== undefined ? absolute( state.x, args.x ) : state.x,
y: args.y !== undefined ? absolute( state.y, args.y ) : state.y,
z: args.z !== undefined ? absolute( state.z, args.z ) : state.z,
e: args.e !== undefined ? absolute( state.e, args.e ) : state.e,
f: args.f !== undefined ? absolute( state.f, args.f ) : state.f,
};
// Layer change detection is not made by watching Z alone; a new layer
// starts when we extrude at a new Z position.
if ( delta( state.e, line.e ) > 0 ) {
state.extruding = delta( state.e, line.e ) > 0;
if ( currentLayer == undefined || line.z != currentLayer.z ) {
newLayer( line );
}
}
addSegment( state, line );
state = line;
} else if ( cmd === 'G2' || cmd === 'G3' ) {
//G2/G3 - Arc Movement ( G2 clock wise and G3 counter clock wise )
//console.warn( 'THREE.GCodeLoader: Arc command not supported' );
} else if ( cmd === 'G90' ) {
//G90: Set to Absolute Positioning
state.relative = false;
} else if ( cmd === 'G91' ) {
//G91: Set to state.relative Positioning
state.relative = true;
} else if ( cmd === 'G92' ) {
//G92: Set Position (redefines current coordinates without moving)
const line = state;
line.x = args.x !== undefined ? args.x : line.x;
line.y = args.y !== undefined ? args.y : line.y;
line.z = args.z !== undefined ? args.z : line.z;
line.e = args.e !== undefined ? args.e : line.e;
} else {
//console.warn( 'THREE.GCodeLoader: Command not supported:' + cmd );
}
}
// Wraps a vertex array in a LineSegments object and adds it to the group.
function addObject( vertex, extruding, i ) {
const geometry = new BufferGeometry();
geometry.setAttribute( 'position', new Float32BufferAttribute( vertex, 3 ) );
const segments = new LineSegments( geometry, extruding ? extrudingMaterial : pathMaterial );
segments.name = 'layer' + i;
object.add( segments );
}
const object = new Group();
object.name = 'gcode';
if ( this.splitLayer ) {
for ( let i = 0; i < layers.length; i ++ ) {
const layer = layers[ i ];
addObject( layer.vertex, true, i );
addObject( layer.pathVertex, false, i );
}
} else {
// Merge every layer into a single extruded + single travel object.
const vertex = [],
pathVertex = [];
for ( let i = 0; i < layers.length; i ++ ) {
const layer = layers[ i ];
const layerVertex = layer.vertex;
const layerPathVertex = layer.pathVertex;
for ( let j = 0; j < layerVertex.length; j ++ ) {
vertex.push( layerVertex[ j ] );
}
for ( let j = 0; j < layerPathVertex.length; j ++ ) {
pathVertex.push( layerPathVertex[ j ] );
}
}
addObject( vertex, true, layers.length );
addObject( pathVertex, false, layers.length );
}
// Printer coordinates are Z-up; rotate into three.js Y-up.
object.rotation.set( - Math.PI / 2, 0, 0 );
return object;
}
}
export { GCodeLoader };

View File

@ -0,0 +1,95 @@
import Stats from '../libs/stats.module.js';
/**
 * GPUStatsPanel — a Stats.js panel that measures GPU frame time with the
 * EXT_disjoint_timer_query_webgl2 extension.
 * https://www.khronos.org/registry/webgl/extensions/EXT_disjoint_timer_query_webgl2/
 */
export class GPUStatsPanel extends Stats.Panel {

	/**
	 * @param {WebGL2RenderingContext} context - GL context to time.
	 * @param {string} [name='GPU MS'] - Panel label.
	 */
	constructor( context, name = 'GPU MS' ) {

		super( name, '#f90', '#210' );

		const extension = context.getExtension( 'EXT_disjoint_timer_query_webgl2' );

		if ( extension === null ) {

			console.warn( 'GPUStatsPanel: disjoint_time_query extension not available.' );

		}

		this.context = context;
		this.extension = extension;
		this.maxTime = 30; // panel scale ceiling in milliseconds
		this.activeQueries = 0; // queries issued but not yet resolved

		// Begins a GPU timer query; the result is polled asynchronously
		// because timer queries resolve several frames later.
		this.startQuery = function () {

			const gl = this.context;
			const ext = this.extension;

			// Without the extension, timing is a no-op.
			if ( ext === null ) {

				return;

			}

			const query = gl.createQuery();
			gl.beginQuery( ext.TIME_ELAPSED_EXT, query );

			this.activeQueries ++;

			const poll = () => {

				// Check whether the query has resolved and is still valid.
				const resultReady = gl.getQueryParameter( query, gl.QUERY_RESULT_AVAILABLE );
				const disjoint = gl.getParameter( ext.GPU_DISJOINT_EXT );
				const elapsedNs = gl.getQueryParameter( query, gl.QUERY_RESULT );
				const elapsedMs = elapsedNs * 1e-6;

				if ( resultReady ) {

					// Disjoint results are unreliable and must be discarded.
					if ( ! disjoint ) {

						this.update( elapsedMs, this.maxTime );

					}

					gl.deleteQuery( query );
					this.activeQueries --;

				} else if ( gl.isContextLost() === false ) {

					// Not ready yet — poll again next frame.
					requestAnimationFrame( poll );

				}

			};

			requestAnimationFrame( poll );

		};

		// Ends the currently running GPU timer query.
		this.endQuery = function () {

			const ext = this.extension;
			const gl = this.context;

			if ( ext === null ) {

				return;

			}

			gl.endQuery( ext.TIME_ELAPSED_EXT );

		};

	}

}

View File

@ -0,0 +1,582 @@
import {
AddEquation,
Color,
CustomBlending,
DataTexture,
DepthTexture,
DepthStencilFormat,
DstAlphaFactor,
DstColorFactor,
HalfFloatType,
MeshNormalMaterial,
NearestFilter,
NoBlending,
RepeatWrapping,
RGBAFormat,
ShaderMaterial,
UniformsUtils,
UnsignedByteType,
UnsignedInt248Type,
WebGLRenderTarget,
ZeroFactor
} from '/static/javascript/three/build/three.module.js';
import { Pass, FullScreenQuad } from './Pass.js';
import { generateMagicSquareNoise, GTAOShader, GTAODepthShader, GTAOBlendShader } from '../shaders/GTAOShader.js';
import { generatePdSamplePointInitializer, PoissonDenoiseShader } from '../shaders/PoissonDenoiseShader.js';
import { CopyShader } from '../shaders/CopyShader.js';
import { SimplexNoise } from '../math/SimplexNoise.js';
class GTAOPass extends Pass {
/**
 * Ground-truth ambient occlusion pass (GTAO + Poisson denoise).
 *
 * @param {Scene} scene - Scene to compute AO for.
 * @param {Camera} camera - Camera used for depth/normal reconstruction.
 * @param {number} [width=512] - Render width in pixels.
 * @param {number} [height=512] - Render height in pixels.
 * @param {Object} [parameters] - Optional external G-buffer: { depthTexture, normalTexture }.
 * @param {Object} [aoParameters] - Forwarded to updateGtaoMaterial().
 * @param {Object} [pdParameters] - Forwarded to updatePdMaterial().
 */
constructor( scene, camera, width, height, parameters, aoParameters, pdParameters ) {
super();
this.width = ( width !== undefined ) ? width : 512;
this.height = ( height !== undefined ) ? height : 512;
this.clear = true;
this.camera = camera;
this.scene = scene;
// Output mode, see GTAOPass.OUTPUT.* in render().
this.output = 0;
this._renderGBuffer = true;
this._visibilityCache = new Map();
// Denoiser tuning; changing rings/exponent/samples regenerates defines.
this.blendIntensity = 1.;
this.pdRings = 2.;
this.pdRadiusExponent = 2.;
this.pdSamples = 16;
this.gtaoNoiseTexture = generateMagicSquareNoise();
this.pdNoiseTexture = this.generateNoise();
this.gtaoRenderTarget = new WebGLRenderTarget( this.width, this.height, { type: HalfFloatType } );
this.pdRenderTarget = this.gtaoRenderTarget.clone();
// AO material: full-screen pass, no depth interaction.
this.gtaoMaterial = new ShaderMaterial( {
defines: Object.assign( {}, GTAOShader.defines ),
uniforms: UniformsUtils.clone( GTAOShader.uniforms ),
vertexShader: GTAOShader.vertexShader,
fragmentShader: GTAOShader.fragmentShader,
blending: NoBlending,
depthTest: false,
depthWrite: false,
} );
this.gtaoMaterial.defines.PERSPECTIVE_CAMERA = this.camera.isPerspectiveCamera ? 1 : 0;
this.gtaoMaterial.uniforms.tNoise.value = this.gtaoNoiseTexture;
this.gtaoMaterial.uniforms.resolution.value.set( this.width, this.height );
this.gtaoMaterial.uniforms.cameraNear.value = this.camera.near;
this.gtaoMaterial.uniforms.cameraFar.value = this.camera.far;
// Used to render the scene's normal G-buffer when none is supplied.
this.normalMaterial = new MeshNormalMaterial();
this.normalMaterial.blending = NoBlending;
// Poisson-disk denoise material, reads the raw AO target.
this.pdMaterial = new ShaderMaterial( {
defines: Object.assign( {}, PoissonDenoiseShader.defines ),
uniforms: UniformsUtils.clone( PoissonDenoiseShader.uniforms ),
vertexShader: PoissonDenoiseShader.vertexShader,
fragmentShader: PoissonDenoiseShader.fragmentShader,
depthTest: false,
depthWrite: false,
} );
this.pdMaterial.uniforms.tDiffuse.value = this.gtaoRenderTarget.texture;
this.pdMaterial.uniforms.tNoise.value = this.pdNoiseTexture;
this.pdMaterial.uniforms.resolution.value.set( this.width, this.height );
this.pdMaterial.uniforms.lumaPhi.value = 10;
this.pdMaterial.uniforms.depthPhi.value = 2;
this.pdMaterial.uniforms.normalPhi.value = 3;
this.pdMaterial.uniforms.radius.value = 8;
// Debug material for the OUTPUT.Depth visualization.
this.depthRenderMaterial = new ShaderMaterial( {
defines: Object.assign( {}, GTAODepthShader.defines ),
uniforms: UniformsUtils.clone( GTAODepthShader.uniforms ),
vertexShader: GTAODepthShader.vertexShader,
fragmentShader: GTAODepthShader.fragmentShader,
blending: NoBlending
} );
this.depthRenderMaterial.uniforms.cameraNear.value = this.camera.near;
this.depthRenderMaterial.uniforms.cameraFar.value = this.camera.far;
// Copy/blend materials use multiplicative blending factors so AO darkens
// the existing frame in the Default output path.
this.copyMaterial = new ShaderMaterial( {
uniforms: UniformsUtils.clone( CopyShader.uniforms ),
vertexShader: CopyShader.vertexShader,
fragmentShader: CopyShader.fragmentShader,
transparent: true,
depthTest: false,
depthWrite: false,
blendSrc: DstColorFactor,
blendDst: ZeroFactor,
blendEquation: AddEquation,
blendSrcAlpha: DstAlphaFactor,
blendDstAlpha: ZeroFactor,
blendEquationAlpha: AddEquation
} );
this.blendMaterial = new ShaderMaterial( {
uniforms: UniformsUtils.clone( GTAOBlendShader.uniforms ),
vertexShader: GTAOBlendShader.vertexShader,
fragmentShader: GTAOBlendShader.fragmentShader,
transparent: true,
depthTest: false,
depthWrite: false,
blending: CustomBlending,
blendSrc: DstColorFactor,
blendDst: ZeroFactor,
blendEquation: AddEquation,
blendSrcAlpha: DstAlphaFactor,
blendDstAlpha: ZeroFactor,
blendEquationAlpha: AddEquation
} );
this.fsQuad = new FullScreenQuad( null );
this.originalClearColor = new Color();
this.setGBuffer( parameters ? parameters.depthTexture : undefined, parameters ? parameters.normalTexture : undefined );
if ( aoParameters !== undefined ) {
this.updateGtaoMaterial( aoParameters );
}
if ( pdParameters !== undefined ) {
this.updatePdMaterial( pdParameters );
}
}
dispose() {
this.gtaoNoiseTexture.dispose();
this.pdNoiseTexture.dispose();
this.normalRenderTarget.dispose();
this.gtaoRenderTarget.dispose();
this.pdRenderTarget.dispose();
this.normalMaterial.dispose();
this.pdMaterial.dispose();
this.copyMaterial.dispose();
this.depthRenderMaterial.dispose();
this.fsQuad.dispose();
}
// Texture holding the final (denoised) ambient-occlusion term.
get gtaoMap() {
return this.pdRenderTarget.texture;
}
/**
 * Configures the depth/normal G-buffer inputs.
 *
 * When a depthTexture is supplied the pass reuses the external buffers and
 * skips its own G-buffer render; otherwise it creates a depth+normal render
 * target and renders the scene's normals into it each frame.
 *
 * @param {DepthTexture} [depthTexture] - External depth texture.
 * @param {Texture} [normalTexture] - External normal texture.
 */
setGBuffer( depthTexture, normalTexture ) {
if ( depthTexture !== undefined ) {
this.depthTexture = depthTexture;
this.normalTexture = normalTexture;
this._renderGBuffer = false;
} else {
this.depthTexture = new DepthTexture();
this.depthTexture.format = DepthStencilFormat;
this.depthTexture.type = UnsignedInt248Type;
this.normalRenderTarget = new WebGLRenderTarget( this.width, this.height, {
minFilter: NearestFilter,
magFilter: NearestFilter,
type: HalfFloatType,
depthTexture: this.depthTexture
} );
this.normalTexture = this.normalRenderTarget.texture;
this._renderGBuffer = true;
}
// When depth and normal share one texture, depth lives in the .w channel.
const normalVectorType = ( this.normalTexture ) ? 1 : 0;
const depthValueSource = ( this.depthTexture === this.normalTexture ) ? 'w' : 'x';
this.gtaoMaterial.defines.NORMAL_VECTOR_TYPE = normalVectorType;
this.gtaoMaterial.defines.DEPTH_SWIZZLING = depthValueSource;
this.gtaoMaterial.uniforms.tNormal.value = this.normalTexture;
this.gtaoMaterial.uniforms.tDepth.value = this.depthTexture;
this.pdMaterial.defines.NORMAL_VECTOR_TYPE = normalVectorType;
this.pdMaterial.defines.DEPTH_SWIZZLING = depthValueSource;
this.pdMaterial.uniforms.tNormal.value = this.normalTexture;
this.pdMaterial.uniforms.tDepth.value = this.depthTexture;
// NOTE(review): when an external depthTexture was supplied above,
// this.normalRenderTarget is undefined and this line throws — it looks
// like it should read this.depthTexture instead; confirm against callers.
this.depthRenderMaterial.uniforms.tDepth.value = this.normalRenderTarget.depthTexture;
}
setSceneClipBox( box ) {
if ( box ) {
this.gtaoMaterial.needsUpdate = this.gtaoMaterial.defines.SCENE_CLIP_BOX !== 1;
this.gtaoMaterial.defines.SCENE_CLIP_BOX = 1;
this.gtaoMaterial.uniforms.sceneBoxMin.value.copy( box.min );
this.gtaoMaterial.uniforms.sceneBoxMax.value.copy( box.max );
} else {
this.gtaoMaterial.needsUpdate = this.gtaoMaterial.defines.SCENE_CLIP_BOX === 0;
this.gtaoMaterial.defines.SCENE_CLIP_BOX = 0;
}
}
updateGtaoMaterial( parameters ) {
if ( parameters.radius !== undefined ) {
this.gtaoMaterial.uniforms.radius.value = parameters.radius;
}
if ( parameters.distanceExponent !== undefined ) {
this.gtaoMaterial.uniforms.distanceExponent.value = parameters.distanceExponent;
}
if ( parameters.thickness !== undefined ) {
this.gtaoMaterial.uniforms.thickness.value = parameters.thickness;
}
if ( parameters.distanceFallOff !== undefined ) {
this.gtaoMaterial.uniforms.distanceFallOff.value = parameters.distanceFallOff;
this.gtaoMaterial.needsUpdate = true;
}
if ( parameters.scale !== undefined ) {
this.gtaoMaterial.uniforms.scale.value = parameters.scale;
}
if ( parameters.samples !== undefined && parameters.samples !== this.gtaoMaterial.defines.SAMPLES ) {
this.gtaoMaterial.defines.SAMPLES = parameters.samples;
this.gtaoMaterial.needsUpdate = true;
}
if ( parameters.screenSpaceRadius !== undefined && ( parameters.screenSpaceRadius ? 1 : 0 ) !== this.gtaoMaterial.defines.SCREEN_SPACE_RADIUS ) {
this.gtaoMaterial.defines.SCREEN_SPACE_RADIUS = parameters.screenSpaceRadius ? 1 : 0;
this.gtaoMaterial.needsUpdate = true;
}
}
updatePdMaterial( parameters ) {
let updateShader = false;
if ( parameters.lumaPhi !== undefined ) {
this.pdMaterial.uniforms.lumaPhi.value = parameters.lumaPhi;
}
if ( parameters.depthPhi !== undefined ) {
this.pdMaterial.uniforms.depthPhi.value = parameters.depthPhi;
}
if ( parameters.normalPhi !== undefined ) {
this.pdMaterial.uniforms.normalPhi.value = parameters.normalPhi;
}
if ( parameters.radius !== undefined && parameters.radius !== this.radius ) {
this.pdMaterial.uniforms.radius.value = parameters.radius;
}
if ( parameters.radiusExponent !== undefined && parameters.radiusExponent !== this.pdRadiusExponent ) {
this.pdRadiusExponent = parameters.radiusExponent;
updateShader = true;
}
if ( parameters.rings !== undefined && parameters.rings !== this.pdRings ) {
this.pdRings = parameters.rings;
updateShader = true;
}
if ( parameters.samples !== undefined && parameters.samples !== this.pdSamples ) {
this.pdSamples = parameters.samples;
updateShader = true;
}
if ( updateShader ) {
this.pdMaterial.defines.SAMPLES = this.pdSamples;
this.pdMaterial.defines.SAMPLE_VECTORS = generatePdSamplePointInitializer( this.pdSamples, this.pdRings, this.pdRadiusExponent );
this.pdMaterial.needsUpdate = true;
}
}
/**
 * Renders the AO pipeline: optional G-buffer pass, raw GTAO pass, Poisson
 * denoise pass, then composites to writeBuffer (or screen) according to
 * this.output.
 *
 * @param {WebGLRenderer} renderer
 * @param {WebGLRenderTarget} writeBuffer - Target when not rendering to screen.
 * @param {WebGLRenderTarget} readBuffer - Beauty pass to composite with.
 */
render( renderer, writeBuffer, readBuffer /*, deltaTime, maskActive */ ) {
// render normals and depth (honor only meshes, points and lines do not contribute to AO)
if ( this._renderGBuffer ) {
this.overrideVisibility();
this.renderOverride( renderer, this.normalMaterial, this.normalRenderTarget, 0x7777ff, 1.0 );
this.restoreVisibility();
}
// render AO
this.gtaoMaterial.uniforms.cameraNear.value = this.camera.near;
this.gtaoMaterial.uniforms.cameraFar.value = this.camera.far;
this.gtaoMaterial.uniforms.cameraProjectionMatrix.value.copy( this.camera.projectionMatrix );
this.gtaoMaterial.uniforms.cameraProjectionMatrixInverse.value.copy( this.camera.projectionMatrixInverse );
this.gtaoMaterial.uniforms.cameraWorldMatrix.value.copy( this.camera.matrixWorld );
this.renderPass( renderer, this.gtaoMaterial, this.gtaoRenderTarget, 0xffffff, 1.0 );
// render poisson denoise
this.pdMaterial.uniforms.cameraProjectionMatrixInverse.value.copy( this.camera.projectionMatrixInverse );
this.renderPass( renderer, this.pdMaterial, this.pdRenderTarget, 0xffffff, 1.0 );
// output result to screen (or writeBuffer) according to the debug mode
switch ( this.output ) {
case GTAOPass.OUTPUT.Off:
break;
case GTAOPass.OUTPUT.Diffuse:
this.copyMaterial.uniforms.tDiffuse.value = readBuffer.texture;
this.copyMaterial.blending = NoBlending;
this.renderPass( renderer, this.copyMaterial, this.renderToScreen ? null : writeBuffer );
break;
case GTAOPass.OUTPUT.AO:
this.copyMaterial.uniforms.tDiffuse.value = this.gtaoRenderTarget.texture;
this.copyMaterial.blending = NoBlending;
this.renderPass( renderer, this.copyMaterial, this.renderToScreen ? null : writeBuffer );
break;
case GTAOPass.OUTPUT.Denoise:
this.copyMaterial.uniforms.tDiffuse.value = this.pdRenderTarget.texture;
this.copyMaterial.blending = NoBlending;
this.renderPass( renderer, this.copyMaterial, this.renderToScreen ? null : writeBuffer );
break;
case GTAOPass.OUTPUT.Depth:
this.depthRenderMaterial.uniforms.cameraNear.value = this.camera.near;
this.depthRenderMaterial.uniforms.cameraFar.value = this.camera.far;
this.renderPass( renderer, this.depthRenderMaterial, this.renderToScreen ? null : writeBuffer );
break;
case GTAOPass.OUTPUT.Normal:
this.copyMaterial.uniforms.tDiffuse.value = this.normalRenderTarget.texture;
this.copyMaterial.blending = NoBlending;
this.renderPass( renderer, this.copyMaterial, this.renderToScreen ? null : writeBuffer );
break;
case GTAOPass.OUTPUT.Default:
// Copy the beauty pass, then multiply the denoised AO over it.
this.copyMaterial.uniforms.tDiffuse.value = readBuffer.texture;
this.copyMaterial.blending = NoBlending;
this.renderPass( renderer, this.copyMaterial, this.renderToScreen ? null : writeBuffer );
this.blendMaterial.uniforms.intensity.value = this.blendIntensity;
this.blendMaterial.uniforms.tDiffuse.value = this.pdRenderTarget.texture;
this.renderPass( renderer, this.blendMaterial, this.renderToScreen ? null : writeBuffer );
break;
default:
console.warn( 'THREE.GTAOPass: Unknown output type.' );
}
}
renderPass( renderer, passMaterial, renderTarget, clearColor, clearAlpha ) {
// save original state
renderer.getClearColor( this.originalClearColor );
const originalClearAlpha = renderer.getClearAlpha();
const originalAutoClear = renderer.autoClear;
renderer.setRenderTarget( renderTarget );
// setup pass state
renderer.autoClear = false;
if ( ( clearColor !== undefined ) && ( clearColor !== null ) ) {
renderer.setClearColor( clearColor );
renderer.setClearAlpha( clearAlpha || 0.0 );
renderer.clear();
}
this.fsQuad.material = passMaterial;
this.fsQuad.render( renderer );
// restore original state
renderer.autoClear = originalAutoClear;
renderer.setClearColor( this.originalClearColor );
renderer.setClearAlpha( originalClearAlpha );
}
renderOverride( renderer, overrideMaterial, renderTarget, clearColor, clearAlpha ) {
renderer.getClearColor( this.originalClearColor );
const originalClearAlpha = renderer.getClearAlpha();
const originalAutoClear = renderer.autoClear;
renderer.setRenderTarget( renderTarget );
renderer.autoClear = false;
clearColor = overrideMaterial.clearColor || clearColor;
clearAlpha = overrideMaterial.clearAlpha || clearAlpha;
if ( ( clearColor !== undefined ) && ( clearColor !== null ) ) {
renderer.setClearColor( clearColor );
renderer.setClearAlpha( clearAlpha || 0.0 );
renderer.clear();
}
this.scene.overrideMaterial = overrideMaterial;
renderer.render( this.scene, this.camera );
this.scene.overrideMaterial = null;
renderer.autoClear = originalAutoClear;
renderer.setClearColor( this.originalClearColor );
renderer.setClearAlpha( originalClearAlpha );
}
setSize( width, height ) {
this.width = width;
this.height = height;
this.gtaoRenderTarget.setSize( width, height );
this.normalRenderTarget.setSize( width, height );
this.pdRenderTarget.setSize( width, height );
this.gtaoMaterial.uniforms.resolution.value.set( width, height );
this.gtaoMaterial.uniforms.cameraProjectionMatrix.value.copy( this.camera.projectionMatrix );
this.gtaoMaterial.uniforms.cameraProjectionMatrixInverse.value.copy( this.camera.projectionMatrixInverse );
this.pdMaterial.uniforms.resolution.value.set( width, height );
this.pdMaterial.uniforms.cameraProjectionMatrixInverse.value.copy( this.camera.projectionMatrixInverse );
}
overrideVisibility() {
const scene = this.scene;
const cache = this._visibilityCache;
scene.traverse( function ( object ) {
cache.set( object, object.visible );
if ( object.isPoints || object.isLine ) object.visible = false;
} );
}
restoreVisibility() {
const scene = this.scene;
const cache = this._visibilityCache;
scene.traverse( function ( object ) {
const visible = cache.get( object );
object.visible = visible;
} );
cache.clear();
}
generateNoise( size = 64 ) {
const simplex = new SimplexNoise();
const arraySize = size * size * 4;
const data = new Uint8Array( arraySize );
for ( let i = 0; i < size; i ++ ) {
for ( let j = 0; j < size; j ++ ) {
const x = i;
const y = j;
data[ ( i * size + j ) * 4 ] = ( simplex.noise( x, y ) * 0.5 + 0.5 ) * 255;
data[ ( i * size + j ) * 4 + 1 ] = ( simplex.noise( x + size, y ) * 0.5 + 0.5 ) * 255;
data[ ( i * size + j ) * 4 + 2 ] = ( simplex.noise( x, y + size ) * 0.5 + 0.5 ) * 255;
data[ ( i * size + j ) * 4 + 3 ] = ( simplex.noise( x + size, y + size ) * 0.5 + 0.5 ) * 255;
}
}
const noiseTexture = new DataTexture( data, size, size, RGBAFormat, UnsignedByteType );
noiseTexture.wrapS = RepeatWrapping;
noiseTexture.wrapT = RepeatWrapping;
noiseTexture.needsUpdate = true;
return noiseTexture;
}
}
/**
 * Enumeration of the output/debug modes supported by GTAOPass (see the
 * `switch ( this.output )` in `render()`). Frozen so the shared enum cannot
 * be mutated by accident at runtime.
 */
GTAOPass.OUTPUT = Object.freeze( {
	'Off': - 1,
	'Default': 0,
	'Diffuse': 1,
	'Depth': 2,
	'Normal': 3,
	'AO': 4,
	'Denoise': 5,
} );
export { GTAOPass };

View File

@ -0,0 +1,424 @@
import {
DataTexture,
Matrix4,
RepeatWrapping,
Vector2,
Vector3,
} from '/static/javascript/three/build/three.module.js';
/**
* References:
* - implemented algorithm - GTAO
* - https://iryoku.com/downloads/Practical-Realtime-Strategies-for-Accurate-Indirect-Occlusion.pdf
* - https://github.com/Patapom/GodComplex/blob/master/Tests/TestHBIL/2018%20Mayaux%20-%20Horizon-Based%20Indirect%20Lighting%20(HBIL).pdf
*
* - other AO algorithms that are not implemented here:
* - Screen Space Ambient Occlusion (SSAO), see also SSAOShader.js
* - http://john-chapman-graphics.blogspot.com/2013/01/ssao-tutorial.html
* - https://learnopengl.com/Advanced-Lighting/SSAO
* - https://creativecoding.soe.ucsc.edu/courses/cmpm164/_schedule/AmbientOcclusion.pdf
* - https://drive.google.com/file/d/1SyagcEVplIm2KkRD3WQYSO9O0Iyi1hfy/edit
* - Scalable Ambient Occlusion (SAO), see also SAOShader.js
* - https://casual-effects.com/research/McGuire2012SAO/index.html
* - https://research.nvidia.com/sites/default/files/pubs/2012-06_Scalable-Ambient-Obscurance/McGuire12SAO.pdf
* - N8HO
* - https://github.com/N8python/n8ao
* - Horizon Based Ambient Occlusion (HBAO)
* - http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.577.2286&rep=rep1&type=pdf
* - https://www.derschmale.com/2013/12/20/an-alternative-implementation-for-hbao-2/
*
* - further reading
* - https://ceur-ws.org/Vol-3027/paper5.pdf
* - https://www.comp.nus.edu.sg/~lowkl/publications/mssao_visual_computer_2012.pdf
* - https://web.ics.purdue.edu/~tmcgraw/papers/mcgraw-ao-2008.pdf
* - https://www.activision.com/cdn/research/Practical_Real_Time_Strategies_for_Accurate_Indirect_Occlusion_NEW%20VERSION_COLOR.pdf
* - https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.390.2463&rep=rep1&type=pdf
* - https://www.intel.com/content/www/us/en/developer/articles/technical/adaptive-screen-space-ambient-occlusion.html
*/
/**
 * Ground Truth Ambient Occlusion shader. Computes per-pixel horizon-based
 * AO from the depth buffer (and optionally a normal buffer), using a noise
 * texture to randomize sample directions.
 */
const GTAOShader = {
name: 'GTAOShader',
// Compile-time switches; changing any of these requires material.needsUpdate.
defines: {
PERSPECTIVE_CAMERA: 1, // 1: perspective depth linearization, 0: orthographic
SAMPLES: 16, // total sample budget, split into DIRECTIONS x STEPS in the shader
NORMAL_VECTOR_TYPE: 1, // 2: raw normals, 1: RGB-packed normals, else: derived from depth
DEPTH_SWIZZLING: 'x', // which component of tDepth holds depth
SCREEN_SPACE_RADIUS: 0, // 1: radius is expressed in screen-space pixels
SCREEN_SPACE_RADIUS_SCALE: 100.0, // pixel scale used when SCREEN_SPACE_RADIUS is on
SCENE_CLIP_BOX: 0, // 1: fade out AO outside sceneBoxMin/sceneBoxMax
},
uniforms: {
tNormal: { value: null }, // G-buffer normals (interpretation set by NORMAL_VECTOR_TYPE)
tDepth: { value: null }, // depth texture
tNoise: { value: null }, // tiling noise texture for sample randomization
resolution: { value: new Vector2() },
cameraNear: { value: null },
cameraFar: { value: null },
cameraProjectionMatrix: { value: new Matrix4() },
cameraProjectionMatrixInverse: { value: new Matrix4() },
cameraWorldMatrix: { value: new Matrix4() }, // needed for the world-space clip box
radius: { value: 0.25 }, // AO sampling radius (world or screen space, see defines)
distanceExponent: { value: 1. }, // spacing of samples along each direction
thickness: { value: 1. }, // max view-space depth delta treated as an occluder
distanceFallOff: { value: 1. }, // attenuation of far samples' horizon contribution
scale: { value: 1. }, // final AO strength (applied as pow(ao, scale))
sceneBoxMin: { value: new Vector3( - 1, - 1, - 1 ) },
sceneBoxMax: { value: new Vector3( 1, 1, 1 ) },
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
varying vec2 vUv;
uniform highp sampler2D tNormal;
uniform highp sampler2D tDepth;
uniform sampler2D tNoise;
uniform vec2 resolution;
uniform float cameraNear;
uniform float cameraFar;
uniform mat4 cameraProjectionMatrix;
uniform mat4 cameraProjectionMatrixInverse;
uniform mat4 cameraWorldMatrix;
uniform float radius;
uniform float distanceExponent;
uniform float thickness;
uniform float distanceFallOff;
uniform float scale;
#if SCENE_CLIP_BOX == 1
uniform vec3 sceneBoxMin;
uniform vec3 sceneBoxMax;
#endif
#include <common>
#include <packing>
#ifndef FRAGMENT_OUTPUT
#define FRAGMENT_OUTPUT vec4(vec3(ao), 1.)
#endif
vec3 getViewPosition(const in vec2 screenPosition, const in float depth) {
vec4 clipSpacePosition = vec4(vec3(screenPosition, depth) * 2.0 - 1.0, 1.0);
vec4 viewSpacePosition = cameraProjectionMatrixInverse * clipSpacePosition;
return viewSpacePosition.xyz / viewSpacePosition.w;
}
float getDepth(const vec2 uv) {
return textureLod(tDepth, uv.xy, 0.0).DEPTH_SWIZZLING;
}
float fetchDepth(const ivec2 uv) {
return texelFetch(tDepth, uv.xy, 0).DEPTH_SWIZZLING;
}
float getViewZ(const in float depth) {
#if PERSPECTIVE_CAMERA == 1
return perspectiveDepthToViewZ(depth, cameraNear, cameraFar);
#else
return orthographicDepthToViewZ(depth, cameraNear, cameraFar);
#endif
}
vec3 computeNormalFromDepth(const vec2 uv) {
vec2 size = vec2(textureSize(tDepth, 0));
ivec2 p = ivec2(uv * size);
float c0 = fetchDepth(p);
float l2 = fetchDepth(p - ivec2(2, 0));
float l1 = fetchDepth(p - ivec2(1, 0));
float r1 = fetchDepth(p + ivec2(1, 0));
float r2 = fetchDepth(p + ivec2(2, 0));
float b2 = fetchDepth(p - ivec2(0, 2));
float b1 = fetchDepth(p - ivec2(0, 1));
float t1 = fetchDepth(p + ivec2(0, 1));
float t2 = fetchDepth(p + ivec2(0, 2));
float dl = abs((2.0 * l1 - l2) - c0);
float dr = abs((2.0 * r1 - r2) - c0);
float db = abs((2.0 * b1 - b2) - c0);
float dt = abs((2.0 * t1 - t2) - c0);
vec3 ce = getViewPosition(uv, c0).xyz;
vec3 dpdx = (dl < dr) ? ce - getViewPosition((uv - vec2(1.0 / size.x, 0.0)), l1).xyz : -ce + getViewPosition((uv + vec2(1.0 / size.x, 0.0)), r1).xyz;
vec3 dpdy = (db < dt) ? ce - getViewPosition((uv - vec2(0.0, 1.0 / size.y)), b1).xyz : -ce + getViewPosition((uv + vec2(0.0, 1.0 / size.y)), t1).xyz;
return normalize(cross(dpdx, dpdy));
}
vec3 getViewNormal(const vec2 uv) {
#if NORMAL_VECTOR_TYPE == 2
return normalize(textureLod(tNormal, uv, 0.).rgb);
#elif NORMAL_VECTOR_TYPE == 1
return unpackRGBToNormal(textureLod(tNormal, uv, 0.).rgb);
#else
return computeNormalFromDepth(uv);
#endif
}
vec3 getSceneUvAndDepth(vec3 sampleViewPos) {
vec4 sampleClipPos = cameraProjectionMatrix * vec4(sampleViewPos, 1.);
vec2 sampleUv = sampleClipPos.xy / sampleClipPos.w * 0.5 + 0.5;
float sampleSceneDepth = getDepth(sampleUv);
return vec3(sampleUv, sampleSceneDepth);
}
void main() {
float depth = getDepth(vUv.xy);
if (depth >= 1.0) {
discard;
return;
}
vec3 viewPos = getViewPosition(vUv, depth);
vec3 viewNormal = getViewNormal(vUv);
float radiusToUse = radius;
float distanceFalloffToUse = thickness;
#if SCREEN_SPACE_RADIUS == 1
float radiusScale = getViewPosition(vec2(0.5 + float(SCREEN_SPACE_RADIUS_SCALE) / resolution.x, 0.0), depth).x;
radiusToUse *= radiusScale;
distanceFalloffToUse *= radiusScale;
#endif
#if SCENE_CLIP_BOX == 1
vec3 worldPos = (cameraWorldMatrix * vec4(viewPos, 1.0)).xyz;
float boxDistance = length(max(vec3(0.0), max(sceneBoxMin - worldPos, worldPos - sceneBoxMax)));
if (boxDistance > radiusToUse) {
discard;
return;
}
#endif
vec2 noiseResolution = vec2(textureSize(tNoise, 0));
vec2 noiseUv = vUv * resolution / noiseResolution;
vec4 noiseTexel = textureLod(tNoise, noiseUv, 0.0);
vec3 randomVec = noiseTexel.xyz * 2.0 - 1.0;
vec3 tangent = normalize(vec3(randomVec.xy, 0.));
vec3 bitangent = vec3(-tangent.y, tangent.x, 0.);
mat3 kernelMatrix = mat3(tangent, bitangent, vec3(0., 0., 1.));
const int DIRECTIONS = SAMPLES < 30 ? 3 : 5;
const int STEPS = (SAMPLES + DIRECTIONS - 1) / DIRECTIONS;
float ao = 0.0;
for (int i = 0; i < DIRECTIONS; ++i) {
float angle = float(i) / float(DIRECTIONS) * PI;
vec4 sampleDir = vec4(cos(angle), sin(angle), 0., 0.5 + 0.5 * noiseTexel.w);
sampleDir.xyz = normalize(kernelMatrix * sampleDir.xyz);
vec3 viewDir = normalize(-viewPos.xyz);
vec3 sliceBitangent = normalize(cross(sampleDir.xyz, viewDir));
vec3 sliceTangent = cross(sliceBitangent, viewDir);
vec3 normalInSlice = normalize(viewNormal - sliceBitangent * dot(viewNormal, sliceBitangent));
vec3 tangentToNormalInSlice = cross(normalInSlice, sliceBitangent);
vec2 cosHorizons = vec2(dot(viewDir, tangentToNormalInSlice), dot(viewDir, -tangentToNormalInSlice));
for (int j = 0; j < STEPS; ++j) {
vec3 sampleViewOffset = sampleDir.xyz * radiusToUse * sampleDir.w * pow(float(j + 1) / float(STEPS), distanceExponent);
vec3 sampleSceneUvDepth = getSceneUvAndDepth(viewPos + sampleViewOffset);
vec3 sampleSceneViewPos = getViewPosition(sampleSceneUvDepth.xy, sampleSceneUvDepth.z);
vec3 viewDelta = sampleSceneViewPos - viewPos;
if (abs(viewDelta.z) < thickness) {
float sampleCosHorizon = dot(viewDir, normalize(viewDelta));
cosHorizons.x += max(0., (sampleCosHorizon - cosHorizons.x) * mix(1., 2. / float(j + 2), distanceFallOff));
}
sampleSceneUvDepth = getSceneUvAndDepth(viewPos - sampleViewOffset);
sampleSceneViewPos = getViewPosition(sampleSceneUvDepth.xy, sampleSceneUvDepth.z);
viewDelta = sampleSceneViewPos - viewPos;
if (abs(viewDelta.z) < thickness) {
float sampleCosHorizon = dot(viewDir, normalize(viewDelta));
cosHorizons.y += max(0., (sampleCosHorizon - cosHorizons.y) * mix(1., 2. / float(j + 2), distanceFallOff));
}
}
vec2 sinHorizons = sqrt(1. - cosHorizons * cosHorizons);
float nx = dot(normalInSlice, sliceTangent);
float ny = dot(normalInSlice, viewDir);
float nxb = 1. / 2. * (acos(cosHorizons.y) - acos(cosHorizons.x) + sinHorizons.x * cosHorizons.x - sinHorizons.y * cosHorizons.y);
float nyb = 1. / 2. * (2. - cosHorizons.x * cosHorizons.x - cosHorizons.y * cosHorizons.y);
float occlusion = nx * nxb + ny * nyb;
ao += occlusion;
}
ao = clamp(ao / float(DIRECTIONS), 0., 1.);
#if SCENE_CLIP_BOX == 1
ao = mix(ao, 1., smoothstep(0., radiusToUse, boxDistance));
#endif
ao = pow(ao, scale);
gl_FragColor = FRAGMENT_OUTPUT;
}`
};
/**
 * Debug shader that visualizes the depth buffer as linear grayscale
 * (near = bright, far = dark). Used by GTAOPass.OUTPUT.Depth.
 */
const GTAODepthShader = {
name: 'GTAODepthShader',
defines: {
PERSPECTIVE_CAMERA: 1 // 1: linearize perspective depth; 0: depth is already orthographic
},
uniforms: {
tDepth: { value: null }, // depth texture to visualize
cameraNear: { value: null },
cameraFar: { value: null },
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform sampler2D tDepth;
uniform float cameraNear;
uniform float cameraFar;
varying vec2 vUv;
#include <packing>
float getLinearDepth( const in vec2 screenPosition ) {
#if PERSPECTIVE_CAMERA == 1
float fragCoordZ = texture2D( tDepth, screenPosition ).x;
float viewZ = perspectiveDepthToViewZ( fragCoordZ, cameraNear, cameraFar );
return viewZToOrthographicDepth( viewZ, cameraNear, cameraFar );
#else
return texture2D( tDepth, screenPosition ).x;
#endif
}
void main() {
float depth = getLinearDepth( vUv );
gl_FragColor = vec4( vec3( 1.0 - depth ), 1.0 );
}`
};
/**
 * Blends the AO term over the scene color: the AO texture is mixed towards
 * white by (1 - intensity), so intensity 0 disables the darkening entirely.
 * Applied with the material's blending mode by GTAOPass.OUTPUT.Default.
 */
const GTAOBlendShader = {
name: 'GTAOBlendShader',
uniforms: {
tDiffuse: { value: null }, // denoised AO texture
intensity: { value: 1.0 } // AO strength in [0, 1]
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform float intensity;
uniform sampler2D tDiffuse;
varying vec2 vUv;
void main() {
vec4 texel = texture2D( tDiffuse, vUv );
gl_FragColor = vec4(mix(vec3(1.), texel.rgb, intensity), texel.a);
}`
};
/**
 * Builds a repeating noise texture whose per-texel rotation angles are
 * distributed by a magic square, yielding an evenly spread sample pattern.
 *
 * @param {number} [size=5] - Requested side length; floored and forced odd.
 * @returns {DataTexture} RGBA texture with the encoded rotation vectors.
 */
function generateMagicSquareNoise( size = 5 ) {

	// Magic squares of this construction only exist for odd side lengths.
	const side = Math.floor( size ) % 2 === 0 ? Math.floor( size ) + 1 : Math.floor( size );
	const magicSquare = generateMagicSquare( side );
	const cellCount = magicSquare.length;
	const data = new Uint8Array( cellCount * 4 );

	for ( let inx = 0; inx < cellCount; ++ inx ) {

		// Map the cell's magic-square rank to an angle on the unit circle.
		const angle = ( 2 * Math.PI * magicSquare[ inx ] ) / cellCount;
		const randomVec = new Vector3( Math.cos( angle ), Math.sin( angle ), 0 ).normalize();

		// Pack the xy direction into bytes; z stays neutral (127 ~ 0), alpha opaque.
		data[ inx * 4 ] = ( randomVec.x * 0.5 + 0.5 ) * 255;
		data[ inx * 4 + 1 ] = ( randomVec.y * 0.5 + 0.5 ) * 255;
		data[ inx * 4 + 2 ] = 127;
		data[ inx * 4 + 3 ] = 255;

	}

	const noiseTexture = new DataTexture( data, side, side );
	noiseTexture.wrapS = RepeatWrapping;
	noiseTexture.wrapT = RepeatWrapping;
	noiseTexture.needsUpdate = true;

	return noiseTexture;

}
/**
 * Generates an odd-order magic square (every row, column and diagonal sums
 * to the same value) via a Siamese-method style construction: numbers are
 * placed diagonally, dropping to an alternate cell when the target is taken.
 *
 * @param {number} size - Requested order; floored, and bumped to the next odd number if even.
 * @returns {number[]} Flat row-major array of length n*n containing 1..n*n.
 */
function generateMagicSquare( size ) {

	const n = Math.floor( size ) % 2 === 0 ? Math.floor( size ) + 1 : Math.floor( size );
	const total = n * n;
	const square = Array( total ).fill( 0 );

	// Start in the middle row of the (virtual) column just past the right edge.
	let row = Math.floor( n / 2 );
	let col = n - 1;
	let num = 1;

	while ( num <= total ) {

		// Wrap indices that stepped off the grid.
		if ( row === - 1 && col === n ) {

			col = n - 2;
			row = 0;

		} else {

			if ( col === n ) col = 0;
			if ( row < 0 ) row = n - 1;

		}

		if ( square[ row * n + col ] !== 0 ) {

			// Target cell already taken: back up and drop one row instead.
			col -= 2;
			row ++;
			continue;

		}

		square[ row * n + col ] = num ++;

		// Continue on the diagonal (right and up).
		col ++;
		row --;

	}

	return square;

}
export { generateMagicSquareNoise, GTAOShader, GTAODepthShader, GTAOBlendShader };

View File

@ -0,0 +1,43 @@
/**
* Gamma Correction Shader
* http://en.wikipedia.org/wiki/gamma_correction
*/
const GammaCorrectionShader = {
name: 'GammaCorrectionShader',
uniforms: {
'tDiffuse': { value: null }
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform sampler2D tDiffuse;
varying vec2 vUv;
void main() {
vec4 tex = texture2D( tDiffuse, vUv );
gl_FragColor = sRGBTransferOETF( tex );
}`
};
export { GammaCorrectionShader };

View File

@ -0,0 +1,639 @@
/**
* Octahedron and Quantization encodings based on work by:
*
* @link https://github.com/tsherif/mesh-quantization-example
*
*/
import {
BufferAttribute,
Matrix3,
Matrix4,
Vector3
} from '/static/javascript/three/build/three.module.js';
import { PackedPhongMaterial } from './PackedPhongMaterial.js';
/**
* Make the input mesh.geometry's normal attribute encoded and compressed by 3 different methods.
* Also will change the mesh.material to `PackedPhongMaterial` which let the vertex shader program decode the normal data.
*
* @param {THREE.Mesh} mesh
* @param {String} encodeMethod "DEFAULT" || "OCT1Byte" || "OCT2Byte" || "ANGLES"
*
*/
function compressNormals( mesh, encodeMethod ) {

	if ( ! mesh.geometry ) {

		console.error( 'Mesh must contain geometry. ' );
		return; // bail out: previously execution fell through and crashed below

	}

	const normal = mesh.geometry.attributes.normal;

	if ( ! normal ) {

		console.error( 'Geometry must contain normal attribute. ' );
		return;

	}

	if ( normal.isPacked ) return; // already compressed, nothing to do

	if ( normal.itemSize != 3 ) {

		console.error( 'normal.itemSize is not 3, which cannot be encoded. ' );
		return;

	}

	const array = normal.array;
	const count = normal.count;
	let result;

	if ( encodeMethod == 'DEFAULT' ) {

		// TODO: Add 1 byte to the result, making the encoded length to be 4 bytes.
		result = new Uint8Array( count * 3 );

		for ( let idx = 0; idx < array.length; idx += 3 ) {

			const encoded = defaultEncode( array[ idx ], array[ idx + 1 ], array[ idx + 2 ], 1 );

			result[ idx + 0 ] = encoded[ 0 ];
			result[ idx + 1 ] = encoded[ 1 ];
			result[ idx + 2 ] = encoded[ 2 ];

		}

		mesh.geometry.setAttribute( 'normal', new BufferAttribute( result, 3, true ) );
		mesh.geometry.attributes.normal.bytes = result.length * 1;

	} else if ( encodeMethod == 'OCT1Byte' ) {

		/**
		 * It is not recommended to use 1-byte octahedron normals encoding unless you want to extremely reduce the memory usage
		 * As it makes vertex data not aligned to a 4 byte boundary which may harm some WebGL implementations and sometimes the normal distortion is visible
		 * Please refer to @zeux 's comments in https://github.com/mrdoob/three.js/pull/18208
		 */
		result = new Int8Array( count * 2 );

		for ( let idx = 0; idx < array.length; idx += 3 ) {

			const encoded = octEncodeBest( array[ idx ], array[ idx + 1 ], array[ idx + 2 ], 1 );

			result[ idx / 3 * 2 + 0 ] = encoded[ 0 ];
			result[ idx / 3 * 2 + 1 ] = encoded[ 1 ];

		}

		mesh.geometry.setAttribute( 'normal', new BufferAttribute( result, 2, true ) );
		mesh.geometry.attributes.normal.bytes = result.length * 1;

	} else if ( encodeMethod == 'OCT2Byte' ) {

		result = new Int16Array( count * 2 );

		for ( let idx = 0; idx < array.length; idx += 3 ) {

			const encoded = octEncodeBest( array[ idx ], array[ idx + 1 ], array[ idx + 2 ], 2 );

			result[ idx / 3 * 2 + 0 ] = encoded[ 0 ];
			result[ idx / 3 * 2 + 1 ] = encoded[ 1 ];

		}

		mesh.geometry.setAttribute( 'normal', new BufferAttribute( result, 2, true ) );
		mesh.geometry.attributes.normal.bytes = result.length * 2;

	} else if ( encodeMethod == 'ANGLES' ) {

		result = new Uint16Array( count * 2 );

		for ( let idx = 0; idx < array.length; idx += 3 ) {

			const encoded = anglesEncode( array[ idx ], array[ idx + 1 ], array[ idx + 2 ] );

			result[ idx / 3 * 2 + 0 ] = encoded[ 0 ];
			result[ idx / 3 * 2 + 1 ] = encoded[ 1 ];

		}

		mesh.geometry.setAttribute( 'normal', new BufferAttribute( result, 2, true ) );
		mesh.geometry.attributes.normal.bytes = result.length * 2;

	} else {

		console.error( 'Unrecognized encoding method, should be `DEFAULT` or `ANGLES` or `OCT`. ' );
		// Fix: previously the attribute was still flagged `isPacked` and the
		// material swapped even though nothing was encoded; leave the mesh untouched.
		return;

	}

	mesh.geometry.attributes.normal.needsUpdate = true;
	mesh.geometry.attributes.normal.isPacked = true;
	mesh.geometry.attributes.normal.packingMethod = encodeMethod;

	// modify material
	if ( ! ( mesh.material instanceof PackedPhongMaterial ) ) {

		mesh.material = new PackedPhongMaterial().copy( mesh.material );

	}

	// USE_PACKED_NORMAL selects the matching decode routine in the vertex shader.
	if ( encodeMethod == 'ANGLES' ) {

		mesh.material.defines.USE_PACKED_NORMAL = 0;

	}

	if ( encodeMethod == 'OCT1Byte' ) {

		mesh.material.defines.USE_PACKED_NORMAL = 1;

	}

	if ( encodeMethod == 'OCT2Byte' ) {

		mesh.material.defines.USE_PACKED_NORMAL = 1;

	}

	if ( encodeMethod == 'DEFAULT' ) {

		mesh.material.defines.USE_PACKED_NORMAL = 2;

	}

}
/**
* Make the input mesh.geometry's position attribute encoded and compressed.
* Also will change the mesh.material to `PackedPhongMaterial` which let the vertex shader program decode the position data.
*
* @param {THREE.Mesh} mesh
*
*/
function compressPositions( mesh ) {

	if ( ! mesh.geometry ) {

		console.error( 'Mesh must contain geometry. ' );
		return; // bail out instead of crashing on the attribute access below

	}

	const position = mesh.geometry.attributes.position;

	if ( ! position ) {

		console.error( 'Geometry must contain position attribute. ' );
		return;

	}

	if ( position.isPacked ) return; // already compressed, nothing to do

	if ( position.itemSize != 3 ) {

		console.error( 'position.itemSize is not 3, which cannot be packed. ' );
		return;

	}

	const array = position.array;
	const encodingBytes = 2;

	const result = quantizedEncode( array, encodingBytes );

	const quantized = result.quantized;
	const decodeMat = result.decodeMat;

	// IMPORTANT: calculate original geometry bounding info first, before updating packed positions
	if ( mesh.geometry.boundingBox == null ) mesh.geometry.computeBoundingBox();
	if ( mesh.geometry.boundingSphere == null ) mesh.geometry.computeBoundingSphere();

	mesh.geometry.setAttribute( 'position', new BufferAttribute( quantized, 3 ) );
	mesh.geometry.attributes.position.isPacked = true;
	mesh.geometry.attributes.position.needsUpdate = true;
	mesh.geometry.attributes.position.bytes = quantized.length * encodingBytes;

	// modify material
	if ( ! ( mesh.material instanceof PackedPhongMaterial ) ) {

		mesh.material = new PackedPhongMaterial().copy( mesh.material );

	}

	mesh.material.defines.USE_PACKED_POSITION = 0;

	// quantizeMatPos maps quantized integer coordinates back into model space.
	mesh.material.uniforms.quantizeMatPos.value = decodeMat;
	mesh.material.uniforms.quantizeMatPos.needsUpdate = true;

}
/**
* Make the input mesh.geometry's uv attribute encoded and compressed.
* Also will change the mesh.material to `PackedPhongMaterial` which let the vertex shader program decode the uv data.
*
* @param {THREE.Mesh} mesh
*
*/
function compressUvs( mesh ) {

	if ( ! mesh.geometry ) {

		console.error( 'Mesh must contain geometry property. ' );
		return; // bail out instead of crashing on the attribute access below

	}

	const uvs = mesh.geometry.attributes.uv;

	if ( ! uvs ) {

		console.error( 'Geometry must contain uv attribute. ' );
		return;

	}

	if ( uvs.isPacked ) return; // already compressed, nothing to do

	// Scan the value range to decide which of the two encodings applies.
	const range = { min: Infinity, max: - Infinity };
	const array = uvs.array;

	for ( let i = 0; i < array.length; i ++ ) {

		range.min = Math.min( range.min, array[ i ] );
		range.max = Math.max( range.max, array[ i ] );

	}

	let result;

	if ( range.min >= - 1.0 && range.max <= 1.0 ) {

		// use default encoding method
		result = new Uint16Array( array.length );

		for ( let i = 0; i < array.length; i += 2 ) {

			const encoded = defaultEncode( array[ i ], array[ i + 1 ], 0, 2 );

			result[ i ] = encoded[ 0 ];
			result[ i + 1 ] = encoded[ 1 ];

		}

		mesh.geometry.setAttribute( 'uv', new BufferAttribute( result, 2, true ) );
		mesh.geometry.attributes.uv.isPacked = true;
		mesh.geometry.attributes.uv.needsUpdate = true;
		mesh.geometry.attributes.uv.bytes = result.length * 2;

		if ( ! ( mesh.material instanceof PackedPhongMaterial ) ) {

			mesh.material = new PackedPhongMaterial().copy( mesh.material );

		}

		mesh.material.defines.USE_PACKED_UV = 0;

	} else {

		// use quantized encoding method
		result = quantizedEncodeUV( array, 2 );

		mesh.geometry.setAttribute( 'uv', new BufferAttribute( result.quantized, 2 ) );
		mesh.geometry.attributes.uv.isPacked = true;
		mesh.geometry.attributes.uv.needsUpdate = true;
		mesh.geometry.attributes.uv.bytes = result.quantized.length * 2;

		if ( ! ( mesh.material instanceof PackedPhongMaterial ) ) {

			mesh.material = new PackedPhongMaterial().copy( mesh.material );

		}

		mesh.material.defines.USE_PACKED_UV = 1;

		// quantizeMatUV maps quantized integer UVs back to the original range.
		mesh.material.uniforms.quantizeMatUV.value = result.decodeMat;
		mesh.material.uniforms.quantizeMatUV.needsUpdate = true;

	}

}
// Encoding functions
/**
 * Encodes a vector with components in [-1, 1] into unsigned integers by
 * remapping each component to [0, 2^(8*bytes) - 1].
 *
 * @param {number} x
 * @param {number} y
 * @param {number} z
 * @param {number} bytes - Bytes per component: 1 (Uint8) or 2 (Uint16).
 * @returns {Uint8Array|Uint16Array} Encoded [x, y, z] triple.
 * @throws {Error} If `bytes` is neither 1 nor 2.
 */
function defaultEncode( x, y, z, bytes ) {

	if ( bytes === 1 ) {

		return new Uint8Array( [
			Math.round( ( x + 1 ) * 0.5 * 255 ),
			Math.round( ( y + 1 ) * 0.5 * 255 ),
			Math.round( ( z + 1 ) * 0.5 * 255 )
		] );

	}

	if ( bytes === 2 ) {

		return new Uint16Array( [
			Math.round( ( x + 1 ) * 0.5 * 65535 ),
			Math.round( ( y + 1 ) * 0.5 * 65535 ),
			Math.round( ( z + 1 ) * 0.5 * 65535 )
		] );

	}

	// Fix: previously this only logged via console.error and returned
	// `undefined`, causing an opaque crash at the call site. Fail fast instead.
	throw new Error( 'number of bytes must be 1 or 2' );

}
// for `Angles` encoding
function anglesEncode( x, y, z ) {
const normal0 = parseInt( 0.5 * ( 1.0 + Math.atan2( y, x ) / Math.PI ) * 65535 );
const normal1 = parseInt( 0.5 * ( 1.0 + z ) * 65535 );
return new Uint16Array( [ normal0, normal1 ] );
}
// for `Octahedron` encoding
function octEncodeBest( x, y, z, bytes ) {
let oct, dec, best, currentCos, bestCos;
// Test various combinations of ceil and floor
// to minimize rounding errors
best = oct = octEncodeVec3( x, y, z, 'floor', 'floor' );
dec = octDecodeVec2( oct );
bestCos = dot( x, y, z, dec );
oct = octEncodeVec3( x, y, z, 'ceil', 'floor' );
dec = octDecodeVec2( oct );
currentCos = dot( x, y, z, dec );
if ( currentCos > bestCos ) {
best = oct;
bestCos = currentCos;
}
oct = octEncodeVec3( x, y, z, 'floor', 'ceil' );
dec = octDecodeVec2( oct );
currentCos = dot( x, y, z, dec );
if ( currentCos > bestCos ) {
best = oct;
bestCos = currentCos;
}
oct = octEncodeVec3( x, y, z, 'ceil', 'ceil' );
dec = octDecodeVec2( oct );
currentCos = dot( x, y, z, dec );
if ( currentCos > bestCos ) {
best = oct;
}
return best;
function octEncodeVec3( x0, y0, z0, xfunc, yfunc ) {
let x = x0 / ( Math.abs( x0 ) + Math.abs( y0 ) + Math.abs( z0 ) );
let y = y0 / ( Math.abs( x0 ) + Math.abs( y0 ) + Math.abs( z0 ) );
if ( z < 0 ) {
const tempx = ( 1 - Math.abs( y ) ) * ( x >= 0 ? 1 : - 1 );
const tempy = ( 1 - Math.abs( x ) ) * ( y >= 0 ? 1 : - 1 );
x = tempx;
y = tempy;
let diff = 1 - Math.abs( x ) - Math.abs( y );
if ( diff > 0 ) {
diff += 0.001;
x += x > 0 ? diff / 2 : - diff / 2;
y += y > 0 ? diff / 2 : - diff / 2;
}
}
if ( bytes == 1 ) {
return new Int8Array( [
Math[ xfunc ]( x * 127.5 + ( x < 0 ? 1 : 0 ) ),
Math[ yfunc ]( y * 127.5 + ( y < 0 ? 1 : 0 ) )
] );
}
if ( bytes == 2 ) {
return new Int16Array( [
Math[ xfunc ]( x * 32767.5 + ( x < 0 ? 1 : 0 ) ),
Math[ yfunc ]( y * 32767.5 + ( y < 0 ? 1 : 0 ) )
] );
}
}
function octDecodeVec2( oct ) {
let x = oct[ 0 ];
let y = oct[ 1 ];
if ( bytes == 1 ) {
x /= x < 0 ? 127 : 128;
y /= y < 0 ? 127 : 128;
} else if ( bytes == 2 ) {
x /= x < 0 ? 32767 : 32768;
y /= y < 0 ? 32767 : 32768;
}
const z = 1 - Math.abs( x ) - Math.abs( y );
if ( z < 0 ) {
const tmpx = x;
x = ( 1 - Math.abs( y ) ) * ( x >= 0 ? 1 : - 1 );
y = ( 1 - Math.abs( tmpx ) ) * ( y >= 0 ? 1 : - 1 );
}
const length = Math.sqrt( x * x + y * y + z * z );
return [
x / length,
y / length,
z / length
];
}
function dot( x, y, z, vec3 ) {
return x * vec3[ 0 ] + y * vec3[ 1 ] + z * vec3[ 2 ];
}
}
/**
 * Quantizes a flat xyz array into unsigned integers spanning the data's
 * axis-aligned bounding box, and builds the Matrix4 that decodes the
 * integers back into the original coordinate space.
 *
 * @param {Float32Array|number[]} array - Flat [x, y, z, ...] values.
 * @param {number} bytes - Bytes per component: 1 (Uint8) or 2 (Uint16).
 * @returns {{quantized: (Uint8Array|Uint16Array), decodeMat: Matrix4}}
 */
function quantizedEncode( array, bytes ) {

	let quantized, segments;

	if ( bytes == 1 ) {

		quantized = new Uint8Array( array.length );
		segments = 255;

	} else if ( bytes == 2 ) {

		quantized = new Uint16Array( array.length );
		segments = 65535;

	} else {

		console.error( 'number of bytes error! ' );

	}

	// Per-axis bounding box of the input data.
	const min = new Float32Array( 3 );
	const max = new Float32Array( 3 );
	min[ 0 ] = min[ 1 ] = min[ 2 ] = Number.MAX_VALUE;
	max[ 0 ] = max[ 1 ] = max[ 2 ] = - Number.MAX_VALUE;

	for ( let i = 0; i < array.length; i += 3 ) {

		for ( let axis = 0; axis < 3; axis ++ ) {

			min[ axis ] = Math.min( min[ axis ], array[ i + axis ] );
			max[ axis ] = Math.max( max[ axis ], array[ i + axis ] );

		}

	}

	// Decode matrix: scale by (extent / segments) and translate by min.
	const decodeMat = new Matrix4();
	decodeMat.scale( new Vector3(
		( max[ 0 ] - min[ 0 ] ) / segments,
		( max[ 1 ] - min[ 1 ] ) / segments,
		( max[ 2 ] - min[ 2 ] ) / segments
	) );
	decodeMat.elements[ 12 ] = min[ 0 ];
	decodeMat.elements[ 13 ] = min[ 1 ];
	decodeMat.elements[ 14 ] = min[ 2 ];
	decodeMat.transpose();

	// Guard degenerate axes (zero extent) against division by zero.
	const multiplier = new Float32Array( [
		max[ 0 ] !== min[ 0 ] ? segments / ( max[ 0 ] - min[ 0 ] ) : 0,
		max[ 1 ] !== min[ 1 ] ? segments / ( max[ 1 ] - min[ 1 ] ) : 0,
		max[ 2 ] !== min[ 2 ] ? segments / ( max[ 2 ] - min[ 2 ] ) : 0
	] );

	for ( let i = 0; i < array.length; i += 3 ) {

		for ( let axis = 0; axis < 3; axis ++ ) {

			quantized[ i + axis ] = Math.floor( ( array[ i + axis ] - min[ axis ] ) * multiplier[ axis ] );

		}

	}

	return {
		quantized: quantized,
		decodeMat: decodeMat
	};

}
/**
 * Quantizes a flat uv array into unsigned integers spanning the data's 2D
 * bounding rectangle, and builds the Matrix3 that decodes the integers back
 * into the original uv space.
 *
 * @param {Float32Array|number[]} array - Flat [u, v, ...] values.
 * @param {number} bytes - Bytes per component: 1 (Uint8) or 2 (Uint16).
 * @returns {{quantized: (Uint8Array|Uint16Array), decodeMat: Matrix3}}
 */
function quantizedEncodeUV( array, bytes ) {

	let quantized, segments;

	if ( bytes == 1 ) {

		quantized = new Uint8Array( array.length );
		segments = 255;

	} else if ( bytes == 2 ) {

		quantized = new Uint16Array( array.length );
		segments = 65535;

	} else {

		console.error( 'number of bytes error! ' );

	}

	// Per-axis bounding rectangle of the input uvs.
	const min = new Float32Array( 2 );
	const max = new Float32Array( 2 );
	min[ 0 ] = min[ 1 ] = Number.MAX_VALUE;
	max[ 0 ] = max[ 1 ] = - Number.MAX_VALUE;

	for ( let i = 0; i < array.length; i += 2 ) {

		for ( let axis = 0; axis < 2; axis ++ ) {

			min[ axis ] = Math.min( min[ axis ], array[ i + axis ] );
			max[ axis ] = Math.max( max[ axis ], array[ i + axis ] );

		}

	}

	// Decode matrix: scale by (extent / segments) and translate by min.
	const decodeMat = new Matrix3();
	decodeMat.scale(
		( max[ 0 ] - min[ 0 ] ) / segments,
		( max[ 1 ] - min[ 1 ] ) / segments
	);
	decodeMat.elements[ 6 ] = min[ 0 ];
	decodeMat.elements[ 7 ] = min[ 1 ];
	decodeMat.transpose();

	// Guard degenerate axes (zero extent) against division by zero.
	const multiplier = new Float32Array( [
		max[ 0 ] !== min[ 0 ] ? segments / ( max[ 0 ] - min[ 0 ] ) : 0,
		max[ 1 ] !== min[ 1 ] ? segments / ( max[ 1 ] - min[ 1 ] ) : 0
	] );

	for ( let i = 0; i < array.length; i += 2 ) {

		for ( let axis = 0; axis < 2; axis ++ ) {

			quantized[ i + axis ] = Math.floor( ( array[ i + axis ] - min[ axis ] ) * multiplier[ axis ] );

		}

	}

	return {
		quantized: quantized,
		decodeMat: decodeMat
	};

}
export {
compressNormals,
compressPositions,
compressUvs,
};

View File

@ -0,0 +1,221 @@
import { Vector3 } from '/static/javascript/three/build/three.module.js';
/**
* Generates 2D-Coordinates in a very fast way.
*
* Based on work by:
* @link http://www.openprocessing.org/sketch/15493
*
* @param center Center of Hilbert curve.
* @param size Total width of Hilbert curve.
* @param iterations Number of subdivisions.
* @param v0 Corner index -X, -Z.
* @param v1 Corner index -X, +Z.
* @param v2 Corner index +X, +Z.
* @param v3 Corner index +X, -Z.
*/
function hilbert2D( center = new Vector3( 0, 0, 0 ), size = 10, iterations = 1, v0 = 0, v1 = 1, v2 = 2, v3 = 3 ) {

	// Half-width of the current square.
	const offset = size / 2;

	// Corners of the square around `center`, in fixed index order:
	// 0: (-X,-Z)  1: (-X,+Z)  2: (+X,+Z)  3: (+X,-Z)
	const corners = [
		new Vector3( center.x - offset, center.y, center.z - offset ),
		new Vector3( center.x - offset, center.y, center.z + offset ),
		new Vector3( center.x + offset, center.y, center.z + offset ),
		new Vector3( center.x + offset, center.y, center.z - offset )
	];

	// Corners in the traversal order requested by the caller.
	const ordered = [ corners[ v0 ], corners[ v1 ], corners[ v2 ], corners[ v3 ] ];

	iterations --;

	if ( iterations < 0 ) {

		// Base case: the four ordered corners form this curve segment.
		return ordered;

	}

	// Recurse into each quadrant, permuting the corner indices so the
	// sub-curves connect end to end.
	return [].concat(
		hilbert2D( ordered[ 0 ], offset, iterations, v0, v3, v2, v1 ),
		hilbert2D( ordered[ 1 ], offset, iterations, v0, v1, v2, v3 ),
		hilbert2D( ordered[ 2 ], offset, iterations, v0, v1, v2, v3 ),
		hilbert2D( ordered[ 3 ], offset, iterations, v2, v1, v0, v3 )
	);

}
/**
* Generates 3D-Coordinates in a very fast way.
*
* Based on work by:
* @link https://openprocessing.org/user/5654
*
* @param center Center of Hilbert curve.
* @param size Total width of Hilbert curve.
* @param iterations Number of subdivisions.
* @param v0 Corner index -X, +Y, -Z.
* @param v1 Corner index -X, +Y, +Z.
* @param v2 Corner index -X, -Y, +Z.
* @param v3 Corner index -X, -Y, -Z.
* @param v4 Corner index +X, -Y, -Z.
* @param v5 Corner index +X, -Y, +Z.
* @param v6 Corner index +X, +Y, +Z.
* @param v7 Corner index +X, +Y, -Z.
*/
function hilbert3D( center = new Vector3( 0, 0, 0 ), size = 10, iterations = 1, v0 = 0, v1 = 1, v2 = 2, v3 = 3, v4 = 4, v5 = 5, v6 = 6, v7 = 7 ) {

	// Half-width of the current cube.
	const offset = size / 2;

	// Corners of the cube around `center`, in fixed index order:
	// 0: (-X,+Y,-Z)  1: (-X,+Y,+Z)  2: (-X,-Y,+Z)  3: (-X,-Y,-Z)
	// 4: (+X,-Y,-Z)  5: (+X,-Y,+Z)  6: (+X,+Y,+Z)  7: (+X,+Y,-Z)
	const corners = [
		new Vector3( center.x - offset, center.y + offset, center.z - offset ),
		new Vector3( center.x - offset, center.y + offset, center.z + offset ),
		new Vector3( center.x - offset, center.y - offset, center.z + offset ),
		new Vector3( center.x - offset, center.y - offset, center.z - offset ),
		new Vector3( center.x + offset, center.y - offset, center.z - offset ),
		new Vector3( center.x + offset, center.y - offset, center.z + offset ),
		new Vector3( center.x + offset, center.y + offset, center.z + offset ),
		new Vector3( center.x + offset, center.y + offset, center.z - offset )
	];

	// Corners in the traversal order requested by the caller.
	const ordered = [
		corners[ v0 ],
		corners[ v1 ],
		corners[ v2 ],
		corners[ v3 ],
		corners[ v4 ],
		corners[ v5 ],
		corners[ v6 ],
		corners[ v7 ]
	];

	iterations --;

	if ( iterations < 0 ) {

		// Base case: the eight ordered corners form this curve segment.
		return ordered;

	}

	// Recurse into each octant, permuting the corner indices so the
	// sub-curves connect end to end.
	return [].concat(
		hilbert3D( ordered[ 0 ], offset, iterations, v0, v3, v4, v7, v6, v5, v2, v1 ),
		hilbert3D( ordered[ 1 ], offset, iterations, v0, v7, v6, v1, v2, v5, v4, v3 ),
		hilbert3D( ordered[ 2 ], offset, iterations, v0, v7, v6, v1, v2, v5, v4, v3 ),
		hilbert3D( ordered[ 3 ], offset, iterations, v2, v3, v0, v1, v6, v7, v4, v5 ),
		hilbert3D( ordered[ 4 ], offset, iterations, v2, v3, v0, v1, v6, v7, v4, v5 ),
		hilbert3D( ordered[ 5 ], offset, iterations, v4, v3, v2, v5, v6, v1, v0, v7 ),
		hilbert3D( ordered[ 6 ], offset, iterations, v4, v3, v2, v5, v6, v1, v0, v7 ),
		hilbert3D( ordered[ 7 ], offset, iterations, v6, v5, v2, v1, v0, v3, v4, v7 )
	);

}
/**
* Generates a Gosper curve (lying in the XY plane)
*
* https://gist.github.com/nitaku/6521802
*
* @param size The size of a single gosper island.
*/
function gosper( size = 1 ) {

	// Expand the L-system axiom `steps` times using the rewrite rules;
	// symbols without a rule (F, +, -) are copied through unchanged.
	function expand( config ) {

		let current = config.axiom;

		for ( let step = 0; step < config.steps; step ++ ) {

			let next = '';

			for ( const symbol of current ) {

				next += ( symbol in config.rules ) ? config.rules[ symbol ] : symbol;

			}

			current = next;

		}

		return current;

	}

	// Interpret the expanded string as turtle graphics (60° turns),
	// producing a flat [x, y, z] coordinate list with z always 0.
	function trace( config ) {

		const points = [ 0, 0, 0 ];

		let x = 0;
		let y = 0;
		let heading = 0;

		for ( const symbol of config.fractal ) {

			switch ( symbol ) {

				case '+':
					heading += config.angle;
					break;

				case '-':
					heading -= config.angle;
					break;

				case 'F':
					x += config.size * Math.cos( heading );
					y -= config.size * Math.sin( heading );
					points.push( x, y, 0 );
					break;

				// 'A' and 'B' are structural symbols only; no movement.

			}

		}

		return points;

	}

	const expanded = expand( {
		axiom: 'A',
		steps: 4,
		rules: {
			A: 'A+BF++BF-FA--FAFA-BF+',
			B: '-FA+BFBF++BF+FA--FA-B'
		}
	} );

	return trace( {
		fractal: expanded,
		size: size,
		angle: Math.PI / 3 // 60 degrees
	} );

}
export {
hilbert2D,
hilbert3D,
gosper,
};

View File

@ -0,0 +1,125 @@
import {
DataTexture,
FloatType,
MathUtils,
RedFormat,
ShaderMaterial,
UniformsUtils
} from '/static/javascript/three/build/three.module.js';
import { Pass, FullScreenQuad } from './Pass.js';
import { DigitalGlitch } from '../shaders/DigitalGlitch.js';
class GlitchPass extends Pass {

	/**
	 * @param {number} dt_size - Side length of the random displacement texture.
	 */
	constructor( dt_size = 64 ) {

		super();

		const shader = DigitalGlitch;

		this.uniforms = UniformsUtils.clone( shader.uniforms );
		this.heightMap = this.generateHeightmap( dt_size );
		this.uniforms[ 'tDisp' ].value = this.heightMap;

		this.material = new ShaderMaterial( {
			uniforms: this.uniforms,
			vertexShader: shader.vertexShader,
			fragmentShader: shader.fragmentShader
		} );

		this.fsQuad = new FullScreenQuad( this.material );

		// When true, the strong glitch branch runs every frame.
		this.goWild = false;
		this.curF = 0;
		this.generateTrigger();

	}

	render( renderer, writeBuffer, readBuffer /*, deltaTime, maskActive */ ) {

		const u = this.uniforms;

		u[ 'tDiffuse' ].value = readBuffer.texture;
		u[ 'seed' ].value = Math.random(); // default seeding
		u[ 'byp' ].value = 0;

		const phase = this.curF % this.randX;

		if ( phase == 0 || this.goWild == true ) {

			// Strong glitch: fully re-randomize and schedule the next trigger.
			u[ 'amount' ].value = Math.random() / 30;
			u[ 'angle' ].value = MathUtils.randFloat( - Math.PI, Math.PI );
			u[ 'seed_x' ].value = MathUtils.randFloat( - 1, 1 );
			u[ 'seed_y' ].value = MathUtils.randFloat( - 1, 1 );
			u[ 'distortion_x' ].value = MathUtils.randFloat( 0, 1 );
			u[ 'distortion_y' ].value = MathUtils.randFloat( 0, 1 );
			this.curF = 0;
			this.generateTrigger();

		} else if ( phase < this.randX / 5 ) {

			// Mild glitch during the frames shortly after a trigger.
			u[ 'amount' ].value = Math.random() / 90;
			u[ 'angle' ].value = MathUtils.randFloat( - Math.PI, Math.PI );
			u[ 'distortion_x' ].value = MathUtils.randFloat( 0, 1 );
			u[ 'distortion_y' ].value = MathUtils.randFloat( 0, 1 );
			u[ 'seed_x' ].value = MathUtils.randFloat( - 0.3, 0.3 );
			u[ 'seed_y' ].value = MathUtils.randFloat( - 0.3, 0.3 );

		} else if ( this.goWild == false ) {

			// Quiet frame: tell the shader to pass the input through untouched.
			u[ 'byp' ].value = 1;

		}

		this.curF ++;

		if ( this.renderToScreen ) {

			renderer.setRenderTarget( null );
			this.fsQuad.render( renderer );

		} else {

			renderer.setRenderTarget( writeBuffer );
			if ( this.clear ) renderer.clear();
			this.fsQuad.render( renderer );

		}

	}

	// Picks a random frame interval until the next strong glitch.
	generateTrigger() {

		this.randX = MathUtils.randInt( 120, 240 );

	}

	// Builds a dt_size x dt_size single-channel float texture of uniform
	// noise, used by the shader as its displacement map ('tDisp').
	generateHeightmap( dt_size ) {

		const length = dt_size * dt_size;
		const data_arr = new Float32Array( length );

		for ( let i = 0; i < length; i ++ ) {

			data_arr[ i ] = MathUtils.randFloat( 0, 1 );

		}

		const texture = new DataTexture( data_arr, dt_size, dt_size, RedFormat, FloatType );
		texture.needsUpdate = true;
		return texture;

	}

	dispose() {

		this.material.dispose();
		this.heightMap.dispose();
		this.fsQuad.dispose();

	}

}
export { GlitchPass };

View File

@ -0,0 +1,321 @@
import {
Color,
Vector3
} from '/static/javascript/three/build/three.module.js';
/**
* God-rays (crepuscular rays)
*
* Similar implementation to the one used by Crytek for CryEngine 2 [Sousa2008].
* Blurs a mask generated from the depth map along radial lines emanating from the light
* source. The blur repeatedly applies a blur filter of increasing support but constant
* sample count to produce a blur filter with large support.
*
* My implementation performs 3 passes, similar to the implementation from Sousa. I found
* just 6 samples per pass produced acceptible results. The blur is applied three times,
* with decreasing filter support. The result is equivalent to a single pass with
* 6*6*6 = 216 samples.
*
* References:
*
* Sousa2008 - Crysis Next Gen Effects, GDC2008, http://www.crytek.com/sites/default/files/GDC08_SousaT_CrysisEffects.ppt
*/
const GodRaysDepthMaskShader = {

	name: 'GodRaysDepthMaskShader',

	uniforms: {

		// Scene depth render; inverted below to form the god-ray mask.
		tInput: {
			value: null
		}

	},

	vertexShader: /* glsl */`

		varying vec2 vUv;

		void main() {

			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

		}`,

	// Inverts the input so occluders become dark in the mask.
	fragmentShader: /* glsl */`

		varying vec2 vUv;

		uniform sampler2D tInput;

		void main() {

			gl_FragColor = vec4( 1.0 ) - texture2D( tInput, vUv );

		}`

};
/**
* The god-ray generation shader.
*
* First pass:
*
* The depth map is blurred along radial lines towards the "sun". The
* output is written to a temporary render target (I used a 1/4 sized
* target).
*
* Pass two & three:
*
* The results of the previous pass are re-blurred, each time with a
* decreased distance between samples.
*/
const GodRaysGenerateShader = {

	name: 'GodRaysGenerateShader',

	uniforms: {

		// Output of the previous pass (depth mask, then successive blurs).
		tInput: {
			value: null
		},
		// Distance between samples along the ray to the sun, in UV units.
		fStepSize: {
			value: 1.0
		},
		// Sun position projected to screen space; z fades out the rays.
		vSunPositionScreenSpace: {
			value: new Vector3()
		}

	},

	vertexShader: /* glsl */`

		varying vec2 vUv;

		void main() {

			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

		}`,

	// Accumulates 6 taps per pass along the line from each pixel toward the
	// sun position (unrolled for ANGLE compatibility, see comments below).
	fragmentShader: /* glsl */`

		#define TAPS_PER_PASS 6.0

		varying vec2 vUv;

		uniform sampler2D tInput;

		uniform vec3 vSunPositionScreenSpace;
		uniform float fStepSize; // filter step size

		void main() {

			// delta from current pixel to "sun" position

			vec2 delta = vSunPositionScreenSpace.xy - vUv;
			float dist = length( delta );

			// Step vector (uv space)

			vec2 stepv = fStepSize * delta / dist;

			// Number of iterations between pixel and sun

			float iters = dist/fStepSize;

			vec2 uv = vUv.xy;
			float col = 0.0;

			// This breaks ANGLE in Chrome 22
			// - see http://code.google.com/p/chromium/issues/detail?id=153105

			/*
			// Unrolling didnt do much on my hardware (ATI Mobility Radeon 3450),
			// so i've just left the loop

			"for ( float i = 0.0; i < TAPS_PER_PASS; i += 1.0 ) {",

			// Accumulate samples, making sure we dont walk past the light source.

			// The check for uv.y < 1 would not be necessary with "border" UV wrap
			// mode, with a black border color. I don't think this is currently
			// exposed by three.js. As a result there might be artifacts when the
			// sun is to the left, right or bottom of screen as these cases are
			// not specifically handled.

			"	col += ( i <= iters && uv.y < 1.0 ? texture2D( tInput, uv ).r : 0.0 );",
			"	uv += stepv;",

			"}",
			*/

			// Unrolling loop manually makes it work in ANGLE

			float f = min( 1.0, max( vSunPositionScreenSpace.z / 1000.0, 0.0 ) ); // used to fade out godrays

			if ( 0.0 <= iters && uv.y < 1.0 ) col += texture2D( tInput, uv ).r * f;
			uv += stepv;

			if ( 1.0 <= iters && uv.y < 1.0 ) col += texture2D( tInput, uv ).r * f;
			uv += stepv;

			if ( 2.0 <= iters && uv.y < 1.0 ) col += texture2D( tInput, uv ).r * f;
			uv += stepv;

			if ( 3.0 <= iters && uv.y < 1.0 ) col += texture2D( tInput, uv ).r * f;
			uv += stepv;

			if ( 4.0 <= iters && uv.y < 1.0 ) col += texture2D( tInput, uv ).r * f;
			uv += stepv;

			if ( 5.0 <= iters && uv.y < 1.0 ) col += texture2D( tInput, uv ).r * f;
			uv += stepv;

			// Should technically be dividing by 'iters but 'TAPS_PER_PASS' smooths out
			// objectionable artifacts, in particular near the sun position. The side
			// effect is that the result is darker than it should be around the sun, as
			// TAPS_PER_PASS is greater than the number of samples actually accumulated.
			// When the result is inverted (in the shader 'godrays_combine this produces
			// a slight bright spot at the position of the sun, even when it is occluded.

			gl_FragColor = vec4( col/TAPS_PER_PASS );
			gl_FragColor.a = 1.0;

		}`

};
/**
* Additively applies god rays from texture tGodRays to a background (tColors).
* fGodRayIntensity attenuates the god rays.
*/
const GodRaysCombineShader = {

	name: 'GodRaysCombineShader',

	uniforms: {

		// Normally rendered scene colours.
		tColors: {
			value: null
		},

		// Accumulated god-ray intensity from the generate passes.
		tGodRays: {
			value: null
		},

		// Scales how strongly the rays are added onto the scene.
		fGodRayIntensity: {
			value: 0.69
		}

	},

	vertexShader: /* glsl */`

		varying vec2 vUv;

		void main() {

			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

		}`,

	fragmentShader: /* glsl */`

		varying vec2 vUv;

		uniform sampler2D tColors;
		uniform sampler2D tGodRays;

		uniform float fGodRayIntensity;

		void main() {

			// Since THREE.MeshDepthMaterial renders foreground objects white and background
			// objects black, the god-rays will be white streaks. Therefore value is inverted
			// before being combined with tColors

			gl_FragColor = texture2D( tColors, vUv ) + fGodRayIntensity * vec4( 1.0 - texture2D( tGodRays, vUv ).r );
			gl_FragColor.a = 1.0;

		}`

};
/**
* A dodgy sun/sky shader. Makes a bright spot at the sun location. Would be
* cheaper/faster/simpler to implement this as a simple sun sprite.
*/
const GodRaysFakeSunShader = {

	name: 'GodRaysFakeSunShader',

	uniforms: {

		// Sun position in screen space; z <= 0 hides the sun disc.
		vSunPositionScreenSpace: {
			value: new Vector3()
		},

		// Render-target aspect ratio, used to keep the sun disc circular.
		fAspect: {
			value: 1.0
		},

		// Colour at the centre of the sun spot.
		sunColor: {
			value: new Color( 0xffee00 )
		},

		// Background colour the spot fades into.
		bgColor: {
			value: new Color( 0x000000 )
		}

	},

	vertexShader: /* glsl */`

		varying vec2 vUv;

		void main() {

			vUv = uv;
			gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );

		}`,

	// Draws a radial falloff around the sun position (bright spot), mixed
	// over the background colour.
	fragmentShader: /* glsl */`

		varying vec2 vUv;

		uniform vec3 vSunPositionScreenSpace;
		uniform float fAspect;

		uniform vec3 sunColor;
		uniform vec3 bgColor;

		void main() {

			vec2 diff = vUv - vSunPositionScreenSpace.xy;

			// Correct for aspect ratio

			diff.x *= fAspect;

			float prop = clamp( length( diff ) / 0.5, 0.0, 1.0 );
			prop = 0.35 * pow( 1.0 - prop, 3.0 );

			gl_FragColor.xyz = ( vSunPositionScreenSpace.z > 0.0 ) ? mix( sunColor, bgColor, 1.0 - prop ) : bgColor;
			gl_FragColor.w = 1.0;

		}`

};
export { GodRaysDepthMaskShader, GodRaysGenerateShader, GodRaysCombineShader, GodRaysFakeSunShader };

View File

@ -0,0 +1,115 @@
import {
CubeTexture,
DataTexture,
FileLoader,
FloatType,
HalfFloatType,
LinearFilter,
LinearSRGBColorSpace,
Loader
} from '/static/javascript/three/build/three.module.js';
import { RGBELoader } from '../loaders/RGBELoader.js';
class HDRCubeTextureLoader extends Loader {

	constructor( manager ) {

		super( manager );

		this.hdrLoader = new RGBELoader();

		// Default texel type for the cube faces.
		this.type = HalfFloatType;

	}

	/**
	 * Loads six RGBE (.hdr) files into one CubeTexture.
	 *
	 * @param {Array<string>} urls - One URL per cube face.
	 * @param {Function} [onLoad] - Called with the CubeTexture once all six faces parsed.
	 * @param {Function} [onProgress]
	 * @param {Function} [onError]
	 * @returns {CubeTexture} The texture (populated asynchronously).
	 */
	load( urls, onLoad, onProgress, onError ) {

		const texture = new CubeTexture();
		texture.type = this.type;

		// Float and half-float HDR data share the same sampler setup:
		// linear-light colour space, linear filtering, no mipmaps.
		switch ( texture.type ) {

			case FloatType:
			case HalfFloatType:

				texture.colorSpace = LinearSRGBColorSpace;
				texture.minFilter = LinearFilter;
				texture.magFilter = LinearFilter;
				texture.generateMipmaps = false;
				break;

		}

		const scope = this;

		let loaded = 0;

		// Fetches and parses face i; once all six faces arrived, finalizes
		// the cube texture and notifies the caller.
		function loadFace( i, onLoad, onProgress, onError ) {

			new FileLoader( scope.manager )
				.setPath( scope.path )
				.setResponseType( 'arraybuffer' )
				.setWithCredentials( scope.withCredentials )
				.load( urls[ i ], function ( buffer ) {

					loaded ++;

					const texData = scope.hdrLoader.parse( buffer );

					if ( ! texData ) return;

					if ( texData.data !== undefined ) {

						const dataTexture = new DataTexture( texData.data, texData.width, texData.height );

						// Mirror the cube texture's sampler state on the face.
						dataTexture.type = texture.type;
						dataTexture.colorSpace = texture.colorSpace;
						dataTexture.format = texture.format;
						dataTexture.minFilter = texture.minFilter;
						dataTexture.magFilter = texture.magFilter;
						dataTexture.generateMipmaps = texture.generateMipmaps;

						texture.images[ i ] = dataTexture;

					}

					if ( loaded === 6 ) {

						texture.needsUpdate = true;
						if ( onLoad ) onLoad( texture );

					}

				}, onProgress, onError );

		}

		for ( let i = 0; i < urls.length; i ++ ) {

			loadFace( i, onLoad, onProgress, onError );

		}

		return texture;

	}

	// Sets the texel type (FloatType / HalfFloatType / ...) used for
	// subsequently loaded textures. Chainable.
	setDataType( value ) {

		this.type = value;
		this.hdrLoader.setDataType( value );

		return this;

	}

}
export { HDRCubeTextureLoader };

View File

@ -0,0 +1,79 @@
import {
ShaderMaterial,
UniformsUtils
} from '/static/javascript/three/build/three.module.js';
import { Pass, FullScreenQuad } from './Pass.js';
import { HalftoneShader } from '../shaders/HalftoneShader.js';
/**
* RGB Halftone pass for three.js effects composer. Requires HalftoneShader.
*/
class HalftonePass extends Pass {

	/**
	 * @param {number} width - Render target width in pixels.
	 * @param {number} height - Render target height in pixels.
	 * @param {Object} [params] - Initial values for any HalftoneShader uniforms.
	 */
	constructor( width, height, params ) {

		super();

		this.uniforms = UniformsUtils.clone( HalftoneShader.uniforms );

		this.material = new ShaderMaterial( {
			uniforms: this.uniforms,
			fragmentShader: HalftoneShader.fragmentShader,
			vertexShader: HalftoneShader.vertexShader
		} );

		// Seed the resolution uniforms, then copy over any recognised params.
		this.uniforms.width.value = width;
		this.uniforms.height.value = height;

		for ( const key in params ) {

			if ( params.hasOwnProperty( key ) && this.uniforms.hasOwnProperty( key ) ) {

				this.uniforms[ key ].value = params[ key ];

			}

		}

		this.fsQuad = new FullScreenQuad( this.material );

	}

	render( renderer, writeBuffer, readBuffer/*, deltaTime, maskActive*/ ) {

		this.material.uniforms[ 'tDiffuse' ].value = readBuffer.texture;

		// Pick the output target, then draw the full-screen quad once.
		if ( this.renderToScreen ) {

			renderer.setRenderTarget( null );

		} else {

			renderer.setRenderTarget( writeBuffer );
			if ( this.clear ) renderer.clear();

		}

		this.fsQuad.render( renderer );

	}

	// Keeps the shader's pixel-space maths in sync with the target size.
	setSize( width, height ) {

		this.uniforms.width.value = width;
		this.uniforms.height.value = height;

	}

	dispose() {

		this.material.dispose();
		this.fsQuad.dispose();

	}

}

View File

@ -0,0 +1,312 @@
/**
* RGB Halftone shader for three.js.
* NOTE:
* Shape (1 = Dot, 2 = Ellipse, 3 = Line, 4 = Square)
* Blending Mode (1 = Linear, 2 = Multiply, 3 = Add, 4 = Lighter, 5 = Darker)
*/
const HalftoneShader = {
name: 'HalftoneShader',
uniforms: {
'tDiffuse': { value: null },
'shape': { value: 1 },
'radius': { value: 4 },
'rotateR': { value: Math.PI / 12 * 1 },
'rotateG': { value: Math.PI / 12 * 2 },
'rotateB': { value: Math.PI / 12 * 3 },
'scatter': { value: 0 },
'width': { value: 1 },
'height': { value: 1 },
'blending': { value: 1 },
'blendingMode': { value: 1 },
'greyscale': { value: false },
'disable': { value: false }
},
vertexShader: /* glsl */`
varying vec2 vUV;
void main() {
vUV = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}`,
fragmentShader: /* glsl */`
#define SQRT2_MINUS_ONE 0.41421356
#define SQRT2_HALF_MINUS_ONE 0.20710678
#define PI2 6.28318531
#define SHAPE_DOT 1
#define SHAPE_ELLIPSE 2
#define SHAPE_LINE 3
#define SHAPE_SQUARE 4
#define BLENDING_LINEAR 1
#define BLENDING_MULTIPLY 2
#define BLENDING_ADD 3
#define BLENDING_LIGHTER 4
#define BLENDING_DARKER 5
uniform sampler2D tDiffuse;
uniform float radius;
uniform float rotateR;
uniform float rotateG;
uniform float rotateB;
uniform float scatter;
uniform float width;
uniform float height;
uniform int shape;
uniform bool disable;
uniform float blending;
uniform int blendingMode;
varying vec2 vUV;
uniform bool greyscale;
const int samples = 8;
float blend( float a, float b, float t ) {
// linear blend
return a * ( 1.0 - t ) + b * t;
}
float hypot( float x, float y ) {
// vector magnitude
return sqrt( x * x + y * y );
}
float rand( vec2 seed ){
// get pseudo-random number
return fract( sin( dot( seed.xy, vec2( 12.9898, 78.233 ) ) ) * 43758.5453 );
}
float distanceToDotRadius( float channel, vec2 coord, vec2 normal, vec2 p, float angle, float rad_max ) {
// apply shape-specific transforms
float dist = hypot( coord.x - p.x, coord.y - p.y );
float rad = channel;
if ( shape == SHAPE_DOT ) {
rad = pow( abs( rad ), 1.125 ) * rad_max;
} else if ( shape == SHAPE_ELLIPSE ) {
rad = pow( abs( rad ), 1.125 ) * rad_max;
if ( dist != 0.0 ) {
float dot_p = abs( ( p.x - coord.x ) / dist * normal.x + ( p.y - coord.y ) / dist * normal.y );
dist = ( dist * ( 1.0 - SQRT2_HALF_MINUS_ONE ) ) + dot_p * dist * SQRT2_MINUS_ONE;
}
} else if ( shape == SHAPE_LINE ) {
rad = pow( abs( rad ), 1.5) * rad_max;
float dot_p = ( p.x - coord.x ) * normal.x + ( p.y - coord.y ) * normal.y;
dist = hypot( normal.x * dot_p, normal.y * dot_p );
} else if ( shape == SHAPE_SQUARE ) {
float theta = atan( p.y - coord.y, p.x - coord.x ) - angle;
float sin_t = abs( sin( theta ) );
float cos_t = abs( cos( theta ) );
rad = pow( abs( rad ), 1.4 );
rad = rad_max * ( rad + ( ( sin_t > cos_t ) ? rad - sin_t * rad : rad - cos_t * rad ) );
}
return rad - dist;
}
struct Cell {
// grid sample positions
vec2 normal;
vec2 p1;
vec2 p2;
vec2 p3;
vec2 p4;
float samp2;
float samp1;
float samp3;
float samp4;
};
vec4 getSample( vec2 point ) {
// multi-sampled point
vec4 tex = texture2D( tDiffuse, vec2( point.x / width, point.y / height ) );
float base = rand( vec2( floor( point.x ), floor( point.y ) ) ) * PI2;
float step = PI2 / float( samples );
float dist = radius * 0.66;
for ( int i = 0; i < samples; ++i ) {
float r = base + step * float( i );
vec2 coord = point + vec2( cos( r ) * dist, sin( r ) * dist );
tex += texture2D( tDiffuse, vec2( coord.x / width, coord.y / height ) );
}
tex /= float( samples ) + 1.0;
return tex;
}
float getDotColour( Cell c, vec2 p, int channel, float angle, float aa ) {
// get colour for given point
float dist_c_1, dist_c_2, dist_c_3, dist_c_4, res;
if ( channel == 0 ) {
c.samp1 = getSample( c.p1 ).r;
c.samp2 = getSample( c.p2 ).r;
c.samp3 = getSample( c.p3 ).r;
c.samp4 = getSample( c.p4 ).r;
} else if (channel == 1) {
c.samp1 = getSample( c.p1 ).g;
c.samp2 = getSample( c.p2 ).g;
c.samp3 = getSample( c.p3 ).g;
c.samp4 = getSample( c.p4 ).g;
} else {
c.samp1 = getSample( c.p1 ).b;
c.samp3 = getSample( c.p3 ).b;
c.samp2 = getSample( c.p2 ).b;
c.samp4 = getSample( c.p4 ).b;
}
dist_c_1 = distanceToDotRadius( c.samp1, c.p1, c.normal, p, angle, radius );
dist_c_2 = distanceToDotRadius( c.samp2, c.p2, c.normal, p, angle, radius );
dist_c_3 = distanceToDotRadius( c.samp3, c.p3, c.normal, p, angle, radius );
dist_c_4 = distanceToDotRadius( c.samp4, c.p4, c.normal, p, angle, radius );
res = ( dist_c_1 > 0.0 ) ? clamp( dist_c_1 / aa, 0.0, 1.0 ) : 0.0;
res += ( dist_c_2 > 0.0 ) ? clamp( dist_c_2 / aa, 0.0, 1.0 ) : 0.0;
res += ( dist_c_3 > 0.0 ) ? clamp( dist_c_3 / aa, 0.0, 1.0 ) : 0.0;
res += ( dist_c_4 > 0.0 ) ? clamp( dist_c_4 / aa, 0.0, 1.0 ) : 0.0;
res = clamp( res, 0.0, 1.0 );
return res;
}
Cell getReferenceCell( vec2 p, vec2 origin, float grid_angle, float step ) {
// get containing cell
Cell c;
// calc grid
vec2 n = vec2( cos( grid_angle ), sin( grid_angle ) );
float threshold = step * 0.5;
float dot_normal = n.x * ( p.x - origin.x ) + n.y * ( p.y - origin.y );
float dot_line = -n.y * ( p.x - origin.x ) + n.x * ( p.y - origin.y );
vec2 offset = vec2( n.x * dot_normal, n.y * dot_normal );
float offset_normal = mod( hypot( offset.x, offset.y ), step );
float normal_dir = ( dot_normal < 0.0 ) ? 1.0 : -1.0;
float normal_scale = ( ( offset_normal < threshold ) ? -offset_normal : step - offset_normal ) * normal_dir;
float offset_line = mod( hypot( ( p.x - offset.x ) - origin.x, ( p.y - offset.y ) - origin.y ), step );
float line_dir = ( dot_line < 0.0 ) ? 1.0 : -1.0;
float line_scale = ( ( offset_line < threshold ) ? -offset_line : step - offset_line ) * line_dir;
// get closest corner
c.normal = n;
c.p1.x = p.x - n.x * normal_scale + n.y * line_scale;
c.p1.y = p.y - n.y * normal_scale - n.x * line_scale;
// scatter
if ( scatter != 0.0 ) {
float off_mag = scatter * threshold * 0.5;
float off_angle = rand( vec2( floor( c.p1.x ), floor( c.p1.y ) ) ) * PI2;
c.p1.x += cos( off_angle ) * off_mag;
c.p1.y += sin( off_angle ) * off_mag;
}
// find corners
float normal_step = normal_dir * ( ( offset_normal < threshold ) ? step : -step );
float line_step = line_dir * ( ( offset_line < threshold ) ? step : -step );
c.p2.x = c.p1.x - n.x * normal_step;
c.p2.y = c.p1.y - n.y * normal_step;
c.p3.x = c.p1.x + n.y * line_step;
c.p3.y = c.p1.y - n.x * line_step;
c.p4.x = c.p1.x - n.x * normal_step + n.y * line_step;
c.p4.y = c.p1.y - n.y * normal_step - n.x * line_step;
return c;
}
float blendColour( float a, float b, float t ) {
// blend colours
if ( blendingMode == BLENDING_LINEAR ) {
return blend( a, b, 1.0 - t );
} else if ( blendingMode == BLENDING_ADD ) {
return blend( a, min( 1.0, a + b ), t );
} else if ( blendingMode == BLENDING_MULTIPLY ) {
return blend( a, max( 0.0, a * b ), t );
} else if ( blendingMode == BLENDING_LIGHTER ) {
return blend( a, max( a, b ), t );
} else if ( blendingMode == BLENDING_DARKER ) {
return blend( a, min( a, b ), t );
} else {
return blend( a, b, 1.0 - t );
}
}
void main() {
if ( ! disable ) {
// setup
vec2 p = vec2( vUV.x * width, vUV.y * height );
vec2 origin = vec2( 0, 0 );
float aa = ( radius < 2.5 ) ? radius * 0.5 : 1.25;
// get channel samples
Cell cell_r = getReferenceCell( p, origin, rotateR, radius );
Cell cell_g = getReferenceCell( p, origin, rotateG, radius );
Cell cell_b = getReferenceCell( p, origin, rotateB, radius );
float r = getDotColour( cell_r, p, 0, rotateR, aa );
float g = getDotColour( cell_g, p, 1, rotateG, aa );
float b = getDotColour( cell_b, p, 2, rotateB, aa );
// blend with original
vec4 colour = texture2D( tDiffuse, vUV );
r = blendColour( r, colour.r, blending );
g = blendColour( g, colour.g, blending );
b = blendColour( b, colour.b, blending );
if ( greyscale ) {
r = g = b = (r + b + g) / 3.0;
}
gl_FragColor = vec4( r, g, b, 1.0 );
} else {
gl_FragColor = texture2D( tDiffuse, vUV );
}
}`
};
export { HalftoneShader };

View File

@ -0,0 +1,59 @@
/**
* Two pass Gaussian blur filter (horizontal and vertical blur shaders)
* - see http://www.cake23.de/traveling-wavefronts-lit-up.html
*
* - 9 samples per pass
* - standard deviation 2.7
* - "h" and "v" parameters should be set to "1 / width" and "1 / height"
*/
const HorizontalBlurShader = {
name: 'HorizontalBlurShader',
uniforms: {
'tDiffuse': { value: null },
'h': { value: 1.0 / 512.0 }
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform sampler2D tDiffuse;
uniform float h;
varying vec2 vUv;
void main() {
vec4 sum = vec4( 0.0 );
sum += texture2D( tDiffuse, vec2( vUv.x - 4.0 * h, vUv.y ) ) * 0.051;
sum += texture2D( tDiffuse, vec2( vUv.x - 3.0 * h, vUv.y ) ) * 0.0918;
sum += texture2D( tDiffuse, vec2( vUv.x - 2.0 * h, vUv.y ) ) * 0.12245;
sum += texture2D( tDiffuse, vec2( vUv.x - 1.0 * h, vUv.y ) ) * 0.1531;
sum += texture2D( tDiffuse, vec2( vUv.x, vUv.y ) ) * 0.1633;
sum += texture2D( tDiffuse, vec2( vUv.x + 1.0 * h, vUv.y ) ) * 0.1531;
sum += texture2D( tDiffuse, vec2( vUv.x + 2.0 * h, vUv.y ) ) * 0.12245;
sum += texture2D( tDiffuse, vec2( vUv.x + 3.0 * h, vUv.y ) ) * 0.0918;
sum += texture2D( tDiffuse, vec2( vUv.x + 4.0 * h, vUv.y ) ) * 0.051;
gl_FragColor = sum;
}`
};
export { HorizontalBlurShader };

View File

@ -0,0 +1,63 @@
/**
* Simple fake tilt-shift effect, modulating two pass Gaussian blur (see above) by vertical position
*
* - 9 samples per pass
* - standard deviation 2.7
* - "h" and "v" parameters should be set to "1 / width" and "1 / height"
* - "r" parameter control where "focused" horizontal line lies
*/
const HorizontalTiltShiftShader = {
name: 'HorizontalTiltShiftShader',
uniforms: {
'tDiffuse': { value: null },
'h': { value: 1.0 / 512.0 },
'r': { value: 0.35 }
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform sampler2D tDiffuse;
uniform float h;
uniform float r;
varying vec2 vUv;
void main() {
vec4 sum = vec4( 0.0 );
float hh = h * abs( r - vUv.y );
sum += texture2D( tDiffuse, vec2( vUv.x - 4.0 * hh, vUv.y ) ) * 0.051;
sum += texture2D( tDiffuse, vec2( vUv.x - 3.0 * hh, vUv.y ) ) * 0.0918;
sum += texture2D( tDiffuse, vec2( vUv.x - 2.0 * hh, vUv.y ) ) * 0.12245;
sum += texture2D( tDiffuse, vec2( vUv.x - 1.0 * hh, vUv.y ) ) * 0.1531;
sum += texture2D( tDiffuse, vec2( vUv.x, vUv.y ) ) * 0.1633;
sum += texture2D( tDiffuse, vec2( vUv.x + 1.0 * hh, vUv.y ) ) * 0.1531;
sum += texture2D( tDiffuse, vec2( vUv.x + 2.0 * hh, vUv.y ) ) * 0.12245;
sum += texture2D( tDiffuse, vec2( vUv.x + 3.0 * hh, vUv.y ) ) * 0.0918;
sum += texture2D( tDiffuse, vec2( vUv.x + 4.0 * hh, vUv.y ) ) * 0.051;
gl_FragColor = sum;
}`
};
export { HorizontalTiltShiftShader };

View File

@ -0,0 +1,67 @@
/**
* Hue and saturation adjustment
* https://github.com/evanw/glfx.js
* hue: -1 to 1 (-1 is 180 degrees in the negative direction, 0 is no change, etc.
* saturation: -1 to 1 (-1 is solid gray, 0 is no change, and 1 is maximum contrast)
*/
const HueSaturationShader = {
name: 'HueSaturationShader',
uniforms: {
'tDiffuse': { value: null },
'hue': { value: 0 },
'saturation': { value: 0 }
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform sampler2D tDiffuse;
uniform float hue;
uniform float saturation;
varying vec2 vUv;
void main() {
gl_FragColor = texture2D( tDiffuse, vUv );
// hue
float angle = hue * 3.14159265;
float s = sin(angle), c = cos(angle);
vec3 weights = (vec3(2.0 * c, -sqrt(3.0) * s - c, sqrt(3.0) * s - c) + 1.0) / 3.0;
float len = length(gl_FragColor.rgb);
gl_FragColor.rgb = vec3(
dot(gl_FragColor.rgb, weights.xyz),
dot(gl_FragColor.rgb, weights.zxy),
dot(gl_FragColor.rgb, weights.yzx)
);
// saturation
float average = (gl_FragColor.r + gl_FragColor.g + gl_FragColor.b) / 3.0;
if (saturation > 0.0) {
gl_FragColor.rgb += (average - gl_FragColor.rgb) * (1.0 - 1.0 / (1.001 - saturation));
} else {
gl_FragColor.rgb += (average - gl_FragColor.rgb) * (-saturation);
}
}`
};
export { HueSaturationShader };

View File

@ -0,0 +1,337 @@
import {
DataTexture,
FileLoader,
FloatType,
RedFormat,
MathUtils,
Loader,
UnsignedByteType,
LinearFilter,
HalfFloatType,
DataUtils
} from '/static/javascript/three/build/three.module.js';
class IESLoader extends Loader {

	// Loads IES photometric data files and converts them into a candela
	// lookup DataTexture for use by light shaders.
	constructor( manager ) {

		super( manager );

		// Texel type of the generated texture: UnsignedByteType,
		// HalfFloatType (default) or FloatType.
		this.type = HalfFloatType;

	}

	// Resamples the lamp's measured candela table onto a fixed
	// 360 (theta) x 180 (phi) one-degree grid, bilinearly interpolating
	// between measured angles, and packs the result into a typed array
	// matching `type`. Returns null on an unsupported type.
	_getIESValues( iesLamp, type ) {

		const width = 360;
		const height = 180;
		const size = width * height;

		const data = new Array( size );

		// Bilinear interpolation of the candela table at (phi, theta), degrees.
		function interpolateCandelaValues( phi, theta ) {

			let phiIndex = 0, thetaIndex = 0;
			let startTheta = 0, endTheta = 0, startPhi = 0, endPhi = 0;

			// Find the horizontal-angle bracket containing theta.
			for ( let i = 0; i < iesLamp.numHorAngles - 1; ++ i ) { // numHorAngles = horAngles.length-1 because of extra padding, so this wont cause an out of bounds error

				if ( theta < iesLamp.horAngles[ i + 1 ] || i == iesLamp.numHorAngles - 2 ) {

					thetaIndex = i;
					startTheta = iesLamp.horAngles[ i ];
					endTheta = iesLamp.horAngles[ i + 1 ];

					break;

				}

			}

			// Find the vertical-angle bracket containing phi.
			for ( let i = 0; i < iesLamp.numVerAngles - 1; ++ i ) {

				if ( phi < iesLamp.verAngles[ i + 1 ] || i == iesLamp.numVerAngles - 2 ) {

					phiIndex = i;
					startPhi = iesLamp.verAngles[ i ];
					endPhi = iesLamp.verAngles[ i + 1 ];

					break;

				}

			}

			const deltaTheta = endTheta - startTheta;
			const deltaPhi = endPhi - startPhi;

			if ( deltaPhi === 0 ) // Outside range
				return 0;

			const t1 = deltaTheta === 0 ? 0 : ( theta - startTheta ) / deltaTheta;
			const t2 = ( phi - startPhi ) / deltaPhi;

			const nextThetaIndex = deltaTheta === 0 ? thetaIndex : thetaIndex + 1;

			// Interpolate along theta at both phi bounds, then along phi.
			const v1 = MathUtils.lerp( iesLamp.candelaValues[ thetaIndex ][ phiIndex ], iesLamp.candelaValues[ nextThetaIndex ][ phiIndex ], t1 );
			const v2 = MathUtils.lerp( iesLamp.candelaValues[ thetaIndex ][ phiIndex + 1 ], iesLamp.candelaValues[ nextThetaIndex ][ phiIndex + 1 ], t1 );
			const v = MathUtils.lerp( v1, v2, t2 );

			return v;

		}

		const startTheta = iesLamp.horAngles[ 0 ], endTheta = iesLamp.horAngles[ iesLamp.numHorAngles - 1 ];

		for ( let i = 0; i < size; ++ i ) {

			let theta = i % width;
			const phi = Math.floor( i / width );

			// Mirror theta into the measured range to exploit lamp symmetry.
			if ( endTheta - startTheta !== 0 && ( theta < startTheta || theta >= endTheta ) ) { // Handle symmetry for hor angles

				theta %= endTheta * 2;

				if ( theta > endTheta )
					theta = endTheta * 2 - theta;

			}

			// NOTE(review): index is phi + theta * height (column-major over a
			// width*height grid) — looks intentional but verify against the
			// 180x1 texture layout built in parse() below.
			data[ phi + theta * height ] = interpolateCandelaValues( phi, theta );

		}

		let result = null;

		if ( type === UnsignedByteType ) result = Uint8Array.from( data.map( v => Math.min( v * 0xFF, 0xFF ) ) );
		else if ( type === HalfFloatType ) result = Uint16Array.from( data.map( v => DataUtils.toHalfFloat( v ) ) );
		else if ( type === FloatType ) result = Float32Array.from( data );
		else console.error( 'IESLoader: Unsupported type:', type );

		return result;

	}

	// Fetches the IES file as text and hands the parsed texture to onLoad.
	load( url, onLoad, onProgress, onError ) {

		const loader = new FileLoader( this.manager );
		loader.setResponseType( 'text' );
		loader.setCrossOrigin( this.crossOrigin );
		loader.setWithCredentials( this.withCredentials );
		loader.setPath( this.path );
		loader.setRequestHeader( this.requestHeader );

		loader.load( url, text => {

			onLoad( this.parse( text ) );

		}, onProgress, onError );

	}

	// Parses IES text into a single-channel lookup DataTexture.
	// NOTE(review): the texture is declared 180x1 although _getIESValues
	// produces 360*180 samples — presumably only the first row is sampled;
	// confirm against the consuming shader before changing.
	parse( text ) {

		const type = this.type;

		const iesLamp = new IESLamp( text );
		const data = this._getIESValues( iesLamp, type );

		const texture = new DataTexture( data, 180, 1, RedFormat, type );
		texture.minFilter = LinearFilter;
		texture.magFilter = LinearFilter;
		texture.needsUpdate = true;

		return texture;

	}

}
// Parses an IES (LM-63) photometric data file into angle tables and a
// normalized candela matrix, stored on `this`: horAngles, verAngles,
// candelaValues[hor][ver], tiltData, plus the lamp/luminaire header fields.
function IESLamp( text ) {
const _self = this;
const textArray = text.split( '\n' );
let lineNumber = 0;
let line;
_self.verAngles = [ ];
_self.horAngles = [ ];
_self.candelaValues = [ ];
_self.tiltData = { };
_self.tiltData.angles = [ ];
_self.tiltData.mulFactors = [ ];
// Splits one file line into whitespace-separated tokens (commas and runs of
// whitespace are treated as single separators).
function textToArray( text ) {
text = text.replace( /^\s+|\s+$/g, '' ); // remove leading or trailing spaces
text = text.replace( /,/g, ' ' ); // replace commas with spaces
text = text.replace( /\s\s+/g, ' ' ); // replace white space/tabs etc by single whitespace
const array = text.split( ' ' );
return array;
}
// Reads numbers from consecutive lines into `array` until exactly `count`
// values have been collected. NOTE(review): assumes the file's token layout
// lines up exactly with `count`; a malformed file could overshoot the length
// check and loop past the end of the text.
function readArray( count, array ) {
while ( true ) {
const line = textArray[ lineNumber ++ ];
const lineData = textToArray( line );
for ( let i = 0; i < lineData.length; ++ i ) {
array.push( Number( lineData[ i ] ) );
}
if ( array.length === count )
break;
}
}
// Reads an inline TILT=INCLUDE section: geometry flag, angle count, then the
// tilt angles and multiplying factors.
function readTilt() {
let line = textArray[ lineNumber ++ ];
let lineData = textToArray( line );
_self.tiltData.lampToLumGeometry = Number( lineData[ 0 ] );
line = textArray[ lineNumber ++ ];
lineData = textToArray( line );
_self.tiltData.numAngles = Number( lineData[ 0 ] );
readArray( _self.tiltData.numAngles, _self.tiltData.angles );
readArray( _self.tiltData.numAngles, _self.tiltData.mulFactors );
}
// Reads the 10-value lamp header line (counts, geometry, units, dimensions).
function readLampValues() {
const values = [ ];
readArray( 10, values );
_self.count = Number( values[ 0 ] );
_self.lumens = Number( values[ 1 ] );
_self.multiplier = Number( values[ 2 ] );
_self.numVerAngles = Number( values[ 3 ] );
_self.numHorAngles = Number( values[ 4 ] );
_self.gonioType = Number( values[ 5 ] );
_self.units = Number( values[ 6 ] );
_self.width = Number( values[ 7 ] );
_self.length = Number( values[ 8 ] );
_self.height = Number( values[ 9 ] );
}
// Reads the ballast factor line (ballast, ballast-lamp photometric factor, watts).
function readLampFactors() {
const values = [ ];
readArray( 3, values );
_self.ballFactor = Number( values[ 0 ] );
_self.blpFactor = Number( values[ 1 ] );
_self.inputWatts = Number( values[ 2 ] );
}
// Skip keyword lines until the TILT= line is found.
while ( true ) {
line = textArray[ lineNumber ++ ];
if ( line.includes( 'TILT' ) ) {
break;
}
}
if ( ! line.includes( 'NONE' ) ) {
if ( line.includes( 'INCLUDE' ) ) {
readTilt();
} else {
// TODO:: Read tilt data from a file
}
}
readLampValues();
readLampFactors();
// Initialize candela value array
for ( let i = 0; i < _self.numHorAngles; ++ i ) {
_self.candelaValues.push( [ ] );
}
// Parse Angles
readArray( _self.numVerAngles, _self.verAngles );
readArray( _self.numHorAngles, _self.horAngles );
// Parse Candela values
for ( let i = 0; i < _self.numHorAngles; ++ i ) {
readArray( _self.numVerAngles, _self.candelaValues[ i ] );
}
// Calculate actual candela values, and normalize.
for ( let i = 0; i < _self.numHorAngles; ++ i ) {
for ( let j = 0; j < _self.numVerAngles; ++ j ) {
// NOTE(review): `*=` combined with multiplying by the value itself squares
// each candela sample before scaling. The values are renormalized below, so
// relative shape is preserved up to the squaring — confirm this is intended
// rather than `=` being meant here.
_self.candelaValues[ i ][ j ] *= _self.candelaValues[ i ][ j ] * _self.multiplier
* _self.ballFactor * _self.blpFactor;
}
}
// Find the peak candela value so the table can be normalized to [0, 1].
let maxVal = - 1;
for ( let i = 0; i < _self.numHorAngles; ++ i ) {
for ( let j = 0; j < _self.numVerAngles; ++ j ) {
const value = _self.candelaValues[ i ][ j ];
maxVal = maxVal < value ? value : maxVal;
}
}
const bNormalize = true;
if ( bNormalize && maxVal > 0 ) {
for ( let i = 0; i < _self.numHorAngles; ++ i ) {
for ( let j = 0; j < _self.numVerAngles; ++ j ) {
_self.candelaValues[ i ][ j ] /= maxVal;
}
}
}
}
export { IESLoader };

View File

@ -0,0 +1,130 @@
import {
FileLoader,
Group,
Loader,
LoadingManager
} from '/static/javascript/three/build/three.module.js';
import { ColladaLoader } from '../loaders/ColladaLoader.js';
import * as fflate from '../libs/fflate.module.js';
/**
 * Loads Google Earth KMZ archives (a zip containing doc.kml plus Collada
 * assets) and returns the embedded Collada scene.
 *
 * Fix: removed the leftover debug `console.log( 'Loading', url )` from the
 * URL modifier — library code should not log every resolved resource.
 */
class KMZLoader extends Loader {

	constructor( manager ) {

		super( manager );

	}

	/**
	 * Loads and parses a KMZ file from a URL.
	 *
	 * @param {string} url
	 * @param {Function} onLoad - Receives the parsed Collada result.
	 * @param {Function} [onProgress]
	 * @param {Function} [onError]
	 */
	load( url, onLoad, onProgress, onError ) {

		const scope = this;

		const loader = new FileLoader( scope.manager );
		loader.setPath( scope.path );
		loader.setResponseType( 'arraybuffer' );
		loader.setRequestHeader( scope.requestHeader );
		loader.setWithCredentials( scope.withCredentials );
		loader.load( url, function ( text ) {

			try {

				onLoad( scope.parse( text ) );

			} catch ( e ) {

				if ( onError ) {

					onError( e );

				} else {

					console.error( e );

				}

				scope.manager.itemError( url );

			}

		}, onProgress, onError );

	}

	/**
	 * Parses a KMZ archive and returns the Collada result, or `{ scene }` with
	 * an empty Group when no .dae file can be found.
	 *
	 * @param {ArrayBuffer} data - Raw KMZ (zip) bytes.
	 */
	parse( data ) {

		// Finds an archive entry whose path ends with the requested URL, so
		// relative texture references inside the DAE resolve to zip contents.
		function findFile( url ) {

			for ( const path in zip ) {

				if ( path.slice( - url.length ) === url ) {

					return zip[ path ];

				}

			}

		}

		const manager = new LoadingManager();
		manager.setURLModifier( function ( url ) {

			const image = findFile( url );

			if ( image ) {

				// Serve embedded resources as blob URLs instead of hitting the network.
				const blob = new Blob( [ image.buffer ], { type: 'application/octet-stream' } );
				return URL.createObjectURL( blob );

			}

			return url;

		} );

		//

		const zip = fflate.unzipSync( new Uint8Array( data ) );

		if ( zip[ 'doc.kml' ] ) {

			// Standard layout: doc.kml points at the model's .dae path.
			const xml = new DOMParser().parseFromString( fflate.strFromU8( zip[ 'doc.kml' ] ), 'application/xml' );

			const model = xml.querySelector( 'Placemark Model Link href' );

			if ( model ) {

				const loader = new ColladaLoader( manager );
				return loader.parse( fflate.strFromU8( zip[ model.textContent ] ) );

			}

		} else {

			console.warn( 'KMZLoader: Missing doc.kml file.' );

			// Fallback: use the first .dae entry found anywhere in the archive.
			for ( const path in zip ) {

				const extension = path.split( '.' ).pop().toLowerCase();

				if ( extension === 'dae' ) {

					const loader = new ColladaLoader( manager );
					return loader.parse( fflate.strFromU8( zip[ path ] ) );

				}

			}

		}

		console.error( 'KMZLoader: Couldn\'t find .dae file.' );
		return { scene: new Group() };

	}

}
export { KMZLoader };

View File

@ -0,0 +1,925 @@
/**
* Loader for KTX 2.0 GPU Texture containers.
*
* KTX 2.0 is a container format for various GPU texture formats. The loader
* supports Basis Universal GPU textures, which can be quickly transcoded to
* a wide variety of GPU texture compression formats, as well as some
* uncompressed DataTexture and Data3DTexture formats.
*
* References:
* - KTX: http://github.khronos.org/KTX-Specification/
* - DFD: https://www.khronos.org/registry/DataFormat/specs/1.3/dataformat.1.3.html#basicdescriptor
*/
import {
CompressedTexture,
CompressedArrayTexture,
CompressedCubeTexture,
Data3DTexture,
DataTexture,
DisplayP3ColorSpace,
FileLoader,
FloatType,
HalfFloatType,
NoColorSpace,
LinearFilter,
LinearMipmapLinearFilter,
LinearDisplayP3ColorSpace,
LinearSRGBColorSpace,
Loader,
RedFormat,
RGB_ETC1_Format,
RGB_ETC2_Format,
RGB_PVRTC_4BPPV1_Format,
RGBA_ASTC_4x4_Format,
RGBA_ASTC_6x6_Format,
RGBA_BPTC_Format,
RGBA_ETC2_EAC_Format,
RGBA_PVRTC_4BPPV1_Format,
RGBA_S3TC_DXT5_Format,
RGBA_S3TC_DXT1_Format,
RGBAFormat,
RGFormat,
SRGBColorSpace,
UnsignedByteType,
} from '/static/javascript/three/build/three.module.js';
import { WorkerPool } from'/static/javascript/three/examples/jsm/utils/WorkerPool.js';
import {
read,
KHR_DF_FLAG_ALPHA_PREMULTIPLIED,
KHR_DF_TRANSFER_SRGB,
KHR_SUPERCOMPRESSION_NONE,
KHR_SUPERCOMPRESSION_ZSTD,
VK_FORMAT_UNDEFINED,
VK_FORMAT_R16_SFLOAT,
VK_FORMAT_R16G16_SFLOAT,
VK_FORMAT_R16G16B16A16_SFLOAT,
VK_FORMAT_R32_SFLOAT,
VK_FORMAT_R32G32_SFLOAT,
VK_FORMAT_R32G32B32A32_SFLOAT,
VK_FORMAT_R8_SRGB,
VK_FORMAT_R8_UNORM,
VK_FORMAT_R8G8_SRGB,
VK_FORMAT_R8G8_UNORM,
VK_FORMAT_R8G8B8A8_SRGB,
VK_FORMAT_R8G8B8A8_UNORM,
VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
KHR_DF_PRIMARIES_UNSPECIFIED,
KHR_DF_PRIMARIES_BT709,
KHR_DF_PRIMARIES_DISPLAYP3
} from '../libs/ktx-parse.module.js';
import { ZSTDDecoder } from '../libs/zstddec.module.js';
const _taskCache = new WeakMap();
let _activeLoaders = 0;
let _zstd;
// Loader for KTX 2.0 containers. Lazily loads the Basis Universal transcoder
// (JS + WASM), runs transcoding in a pool of Web Workers, and caches in-flight
// tasks per source buffer.
class KTX2Loader extends Loader {
constructor( manager ) {
super( manager );
this.transcoderPath = '';
this.transcoderBinary = null;
this.transcoderPending = null;
this.workerPool = new WorkerPool();
this.workerSourceURL = '';
this.workerConfig = null;
if ( typeof MSC_TRANSCODER !== 'undefined' ) {
console.warn(
'THREE.KTX2Loader: Please update to latest "basis_transcoder".'
+ ' "msc_basis_transcoder" is no longer supported in three.js r125+.'
);
}
}
// Sets the directory containing basis_transcoder.js/.wasm. Chainable.
setTranscoderPath( path ) {
this.transcoderPath = path;
return this;
}
// Caps the number of transcoding workers. Chainable.
setWorkerLimit( num ) {
this.workerPool.setWorkerLimit( num );
return this;
}
// Async GPU-format capability detection (WebGPU-style feature queries). Chainable.
async detectSupportAsync( renderer ) {
this.workerConfig = {
astcSupported: await renderer.hasFeatureAsync( 'texture-compression-astc' ),
etc1Supported: await renderer.hasFeatureAsync( 'texture-compression-etc1' ),
etc2Supported: await renderer.hasFeatureAsync( 'texture-compression-etc2' ),
dxtSupported: await renderer.hasFeatureAsync( 'texture-compression-bc' ),
bptcSupported: await renderer.hasFeatureAsync( 'texture-compression-bptc' ),
pvrtcSupported: await renderer.hasFeatureAsync( 'texture-compression-pvrtc' )
};
return this;
}
// Synchronous capability detection for WebGPU or WebGL renderers. Must be
// called before load(). Chainable.
detectSupport( renderer ) {
if ( renderer.isWebGPURenderer === true ) {
this.workerConfig = {
astcSupported: renderer.hasFeature( 'texture-compression-astc' ),
etc1Supported: renderer.hasFeature( 'texture-compression-etc1' ),
etc2Supported: renderer.hasFeature( 'texture-compression-etc2' ),
dxtSupported: renderer.hasFeature( 'texture-compression-bc' ),
bptcSupported: renderer.hasFeature( 'texture-compression-bptc' ),
pvrtcSupported: renderer.hasFeature( 'texture-compression-pvrtc' )
};
} else {
this.workerConfig = {
astcSupported: renderer.extensions.has( 'WEBGL_compressed_texture_astc' ),
etc1Supported: renderer.extensions.has( 'WEBGL_compressed_texture_etc1' ),
etc2Supported: renderer.extensions.has( 'WEBGL_compressed_texture_etc' ),
dxtSupported: renderer.extensions.has( 'WEBGL_compressed_texture_s3tc' ),
bptcSupported: renderer.extensions.has( 'EXT_texture_compression_bptc' ),
pvrtcSupported: renderer.extensions.has( 'WEBGL_compressed_texture_pvrtc' )
|| renderer.extensions.has( 'WEBKIT_WEBGL_compressed_texture_pvrtc' )
};
}
return this;
}
// Lazily fetches the transcoder JS + WASM, then builds a self-contained worker
// script (constants + transcoder + BasisWorker body) served from a blob URL.
// Returns a promise resolved once workers can be created.
init() {
if ( ! this.transcoderPending ) {
// Load transcoder wrapper.
const jsLoader = new FileLoader( this.manager );
jsLoader.setPath( this.transcoderPath );
jsLoader.setWithCredentials( this.withCredentials );
const jsContent = jsLoader.loadAsync( 'basis_transcoder.js' );
// Load transcoder WASM binary.
const binaryLoader = new FileLoader( this.manager );
binaryLoader.setPath( this.transcoderPath );
binaryLoader.setResponseType( 'arraybuffer' );
binaryLoader.setWithCredentials( this.withCredentials );
const binaryContent = binaryLoader.loadAsync( 'basis_transcoder.wasm' );
this.transcoderPending = Promise.all( [ jsContent, binaryContent ] )
.then( ( [ jsContent, binaryContent ] ) => {
const fn = KTX2Loader.BasisWorker.toString();
const body = [
'/* constants */',
'let _EngineFormat = ' + JSON.stringify( KTX2Loader.EngineFormat ),
'let _TranscoderFormat = ' + JSON.stringify( KTX2Loader.TranscoderFormat ),
'let _BasisFormat = ' + JSON.stringify( KTX2Loader.BasisFormat ),
'/* basis_transcoder.js */',
jsContent,
'/* worker */',
fn.substring( fn.indexOf( '{' ) + 1, fn.lastIndexOf( '}' ) )
].join( '\n' );
this.workerSourceURL = URL.createObjectURL( new Blob( [ body ] ) );
this.transcoderBinary = binaryContent;
this.workerPool.setWorkerCreator( () => {
const worker = new Worker( this.workerSourceURL );
// Each worker gets its own copy of the WASM binary, transferred to avoid
// sharing the master copy.
const transcoderBinary = this.transcoderBinary.slice( 0 );
worker.postMessage( { type: 'init', config: this.workerConfig, transcoderBinary }, [ transcoderBinary ] );
return worker;
} );
} );
if ( _activeLoaders > 0 ) {
// Each instance loads a transcoder and allocates workers, increasing network and memory cost.
console.warn(
'THREE.KTX2Loader: Multiple active KTX2 loaders may cause performance issues.'
+ ' Use a single KTX2Loader instance, or call .dispose() on old instances.'
);
}
_activeLoaders ++;
}
return this.transcoderPending;
}
// Loads a .ktx2 file. Requires detectSupport()/detectSupportAsync() first.
load( url, onLoad, onProgress, onError ) {
if ( this.workerConfig === null ) {
throw new Error( 'THREE.KTX2Loader: Missing initialization with `.detectSupport( renderer )`.' );
}
const loader = new FileLoader( this.manager );
loader.setResponseType( 'arraybuffer' );
loader.setWithCredentials( this.withCredentials );
loader.load( url, ( buffer ) => {
// Check for an existing task using this buffer. A transferred buffer cannot be transferred
// again from this thread.
if ( _taskCache.has( buffer ) ) {
const cachedTask = _taskCache.get( buffer );
return cachedTask.promise.then( onLoad ).catch( onError );
}
this._createTexture( buffer )
.then( ( texture ) => onLoad ? onLoad( texture ) : null )
.catch( onError );
}, onProgress, onError );
}
// Wraps a worker transcode result into the matching compressed texture class
// (cube, array, or 2D) and applies filtering/color-space metadata.
_createTextureFrom( transcodeResult, container ) {
const { faces, width, height, format, type, error, dfdFlags } = transcodeResult;
if ( type === 'error' ) return Promise.reject( error );
let texture;
if ( container.faceCount === 6 ) {
texture = new CompressedCubeTexture( faces, format, UnsignedByteType );
} else {
const mipmaps = faces[ 0 ].mipmaps;
texture = container.layerCount > 1
? new CompressedArrayTexture( mipmaps, width, height, container.layerCount, format, UnsignedByteType )
: new CompressedTexture( mipmaps, width, height, format, UnsignedByteType );
}
texture.minFilter = faces[ 0 ].mipmaps.length === 1 ? LinearFilter : LinearMipmapLinearFilter;
texture.magFilter = LinearFilter;
texture.generateMipmaps = false;
texture.needsUpdate = true;
texture.colorSpace = parseColorSpace( container );
texture.premultiplyAlpha = !! ( dfdFlags & KHR_DF_FLAG_ALPHA_PREMULTIPLIED );
return texture;
}
/**
* @param {ArrayBuffer} buffer
* @param {object?} config
* @return {Promise<CompressedTexture|CompressedArrayTexture|DataTexture|Data3DTexture>}
*/
async _createTexture( buffer, config = {} ) {
const container = read( new Uint8Array( buffer ) );
// Non-Basis (raw vkFormat) textures skip the worker pool entirely.
if ( container.vkFormat !== VK_FORMAT_UNDEFINED ) {
return createRawTexture( container );
}
//
const taskConfig = config;
const texturePending = this.init().then( () => {
// The buffer is transferred to the worker and becomes unusable here.
return this.workerPool.postMessage( { type: 'transcode', buffer, taskConfig: taskConfig }, [ buffer ] );
} ).then( ( e ) => this._createTextureFrom( e.data, container ) );
// Cache the task result.
_taskCache.set( buffer, { promise: texturePending } );
return texturePending;
}
// Terminates workers and releases the blob worker script. Chainable.
dispose() {
this.workerPool.dispose();
if ( this.workerSourceURL ) URL.revokeObjectURL( this.workerSourceURL );
_activeLoaders --;
return this;
}
}
/* CONSTANTS */
// Source encodings a Basis Universal KTX2 payload can use.
KTX2Loader.BasisFormat = {
ETC1S: 0,
UASTC_4x4: 1,
};
// Target formats understood by the Basis transcoder (mirrors the transcoder's
// own enum; serialized into the worker script by init()).
KTX2Loader.TranscoderFormat = {
ETC1: 0,
ETC2: 1,
BC1: 2,
BC3: 3,
BC4: 4,
BC5: 5,
BC7_M6_OPAQUE_ONLY: 6,
BC7_M5: 7,
PVRTC1_4_RGB: 8,
PVRTC1_4_RGBA: 9,
ASTC_4x4: 10,
ATC_RGB: 11,
ATC_RGBA_INTERPOLATED_ALPHA: 12,
RGBA32: 13,
RGB565: 14,
BGR565: 15,
RGBA4444: 16,
};
// three.js texture format constants matching each transcoder target.
KTX2Loader.EngineFormat = {
RGBAFormat: RGBAFormat,
RGBA_ASTC_4x4_Format: RGBA_ASTC_4x4_Format,
RGBA_BPTC_Format: RGBA_BPTC_Format,
RGBA_ETC2_EAC_Format: RGBA_ETC2_EAC_Format,
RGBA_PVRTC_4BPPV1_Format: RGBA_PVRTC_4BPPV1_Format,
RGBA_S3TC_DXT5_Format: RGBA_S3TC_DXT5_Format,
RGB_ETC1_Format: RGB_ETC1_Format,
RGB_ETC2_Format: RGB_ETC2_Format,
RGB_PVRTC_4BPPV1_Format: RGB_PVRTC_4BPPV1_Format,
RGBA_S3TC_DXT1_Format: RGBA_S3TC_DXT1_Format,
};
/* WEB WORKER */
// Worker entry point. NOTE: this function is never called directly — init()
// stringifies its BODY into a blob worker script, so everything here must be
// self-contained (only _EngineFormat/_TranscoderFormat/_BasisFormat and the
// transcoder globals injected by init() are available).
KTX2Loader.BasisWorker = function () {
let config;
let transcoderPending;
let BasisModule;
const EngineFormat = _EngineFormat; // eslint-disable-line no-undef
const TranscoderFormat = _TranscoderFormat; // eslint-disable-line no-undef
const BasisFormat = _BasisFormat; // eslint-disable-line no-undef
// Message protocol: 'init' carries the capability config + WASM binary;
// 'transcode' carries a KTX2 buffer and answers with faces or an error.
self.addEventListener( 'message', function ( e ) {
const message = e.data;
switch ( message.type ) {
case 'init':
config = message.config;
init( message.transcoderBinary );
break;
case 'transcode':
transcoderPending.then( () => {
try {
const { faces, buffers, width, height, hasAlpha, format, dfdFlags } = transcode( message.buffer );
// `buffers` are transferred back to the main thread, not copied.
self.postMessage( { type: 'transcode', id: message.id, faces, width, height, hasAlpha, format, dfdFlags }, buffers );
} catch ( error ) {
console.error( error );
self.postMessage( { type: 'error', id: message.id, error: error.message } );
}
} );
break;
}
} );
// Instantiates the Basis WASM module; `transcoderPending` resolves when ready.
function init( wasmBinary ) {
transcoderPending = new Promise( ( resolve ) => {
BasisModule = { wasmBinary, onRuntimeInitialized: resolve };
BASIS( BasisModule ); // eslint-disable-line no-undef
} ).then( () => {
BasisModule.initializeBasis();
if ( BasisModule.KTX2File === undefined ) {
console.warn( 'THREE.KTX2Loader: Please update Basis Universal transcoder.' );
}
} );
}
// Transcodes every face / mip level / array layer of the KTX2 file to the
// best target format for this device, returning faces plus the transferable
// buffers backing them.
function transcode( buffer ) {
const ktx2File = new BasisModule.KTX2File( new Uint8Array( buffer ) );
function cleanup() {
ktx2File.close();
ktx2File.delete();
}
if ( ! ktx2File.isValid() ) {
cleanup();
throw new Error( 'THREE.KTX2Loader: Invalid or unsupported .ktx2 file' );
}
const basisFormat = ktx2File.isUASTC() ? BasisFormat.UASTC_4x4 : BasisFormat.ETC1S;
const width = ktx2File.getWidth();
const height = ktx2File.getHeight();
const layerCount = ktx2File.getLayers() || 1;
const levelCount = ktx2File.getLevels();
const faceCount = ktx2File.getFaces();
const hasAlpha = ktx2File.getHasAlpha();
const dfdFlags = ktx2File.getDFDFlags();
const { transcoderFormat, engineFormat } = getTranscoderFormat( basisFormat, width, height, hasAlpha );
if ( ! width || ! height || ! levelCount ) {
cleanup();
throw new Error( 'THREE.KTX2Loader: Invalid texture' );
}
if ( ! ktx2File.startTranscoding() ) {
cleanup();
throw new Error( 'THREE.KTX2Loader: .startTranscoding failed' );
}
const faces = [];
const buffers = [];
for ( let face = 0; face < faceCount; face ++ ) {
const mipmaps = [];
for ( let mip = 0; mip < levelCount; mip ++ ) {
const layerMips = [];
let mipWidth, mipHeight;
for ( let layer = 0; layer < layerCount; layer ++ ) {
const levelInfo = ktx2File.getImageLevelInfo( mip, layer, face );
if ( face === 0 && mip === 0 && layer === 0 && ( levelInfo.origWidth % 4 !== 0 || levelInfo.origHeight % 4 !== 0 ) ) {
console.warn( 'THREE.KTX2Loader: ETC1S and UASTC textures should use multiple-of-four dimensions.' );
}
if ( levelCount > 1 ) {
mipWidth = levelInfo.origWidth;
mipHeight = levelInfo.origHeight;
} else {
// Handles non-multiple-of-four dimensions in textures without mipmaps. Textures with
// mipmaps must use multiple-of-four dimensions, for some texture formats and APIs.
// See mrdoob/three.js#25908.
mipWidth = levelInfo.width;
mipHeight = levelInfo.height;
}
const dst = new Uint8Array( ktx2File.getImageTranscodedSizeInBytes( mip, layer, 0, transcoderFormat ) );
const status = ktx2File.transcodeImage( dst, mip, layer, face, transcoderFormat, 0, - 1, - 1 );
if ( ! status ) {
cleanup();
throw new Error( 'THREE.KTX2Loader: .transcodeImage failed.' );
}
layerMips.push( dst );
}
// Array layers of one mip level are packed into a single contiguous buffer.
const mipData = concat( layerMips );
mipmaps.push( { data: mipData, width: mipWidth, height: mipHeight } );
buffers.push( mipData.buffer );
}
faces.push( { mipmaps, width, height, format: engineFormat } );
}
cleanup();
return { faces, buffers, width, height, hasAlpha, format: engineFormat, dfdFlags };
}
//
// Optimal choice of a transcoder target format depends on the Basis format (ETC1S or UASTC),
// device capabilities, and texture dimensions. The list below ranks the formats separately
// for ETC1S and UASTC.
//
// In some cases, transcoding UASTC to RGBA32 might be preferred for higher quality (at
// significant memory cost) compared to ETC1/2, BC1/3, and PVRTC. The transcoder currently
// chooses RGBA32 only as a last resort and does not expose that option to the caller.
const FORMAT_OPTIONS = [
{
if: 'astcSupported',
basisFormat: [ BasisFormat.UASTC_4x4 ],
transcoderFormat: [ TranscoderFormat.ASTC_4x4, TranscoderFormat.ASTC_4x4 ],
engineFormat: [ EngineFormat.RGBA_ASTC_4x4_Format, EngineFormat.RGBA_ASTC_4x4_Format ],
priorityETC1S: Infinity,
priorityUASTC: 1,
needsPowerOfTwo: false,
},
{
if: 'bptcSupported',
basisFormat: [ BasisFormat.ETC1S, BasisFormat.UASTC_4x4 ],
transcoderFormat: [ TranscoderFormat.BC7_M5, TranscoderFormat.BC7_M5 ],
engineFormat: [ EngineFormat.RGBA_BPTC_Format, EngineFormat.RGBA_BPTC_Format ],
priorityETC1S: 3,
priorityUASTC: 2,
needsPowerOfTwo: false,
},
{
if: 'dxtSupported',
basisFormat: [ BasisFormat.ETC1S, BasisFormat.UASTC_4x4 ],
transcoderFormat: [ TranscoderFormat.BC1, TranscoderFormat.BC3 ],
engineFormat: [ EngineFormat.RGBA_S3TC_DXT1_Format, EngineFormat.RGBA_S3TC_DXT5_Format ],
priorityETC1S: 4,
priorityUASTC: 5,
needsPowerOfTwo: false,
},
{
if: 'etc2Supported',
basisFormat: [ BasisFormat.ETC1S, BasisFormat.UASTC_4x4 ],
transcoderFormat: [ TranscoderFormat.ETC1, TranscoderFormat.ETC2 ],
engineFormat: [ EngineFormat.RGB_ETC2_Format, EngineFormat.RGBA_ETC2_EAC_Format ],
priorityETC1S: 1,
priorityUASTC: 3,
needsPowerOfTwo: false,
},
{
if: 'etc1Supported',
basisFormat: [ BasisFormat.ETC1S, BasisFormat.UASTC_4x4 ],
transcoderFormat: [ TranscoderFormat.ETC1 ],
engineFormat: [ EngineFormat.RGB_ETC1_Format ],
priorityETC1S: 2,
priorityUASTC: 4,
needsPowerOfTwo: false,
},
{
if: 'pvrtcSupported',
basisFormat: [ BasisFormat.ETC1S, BasisFormat.UASTC_4x4 ],
transcoderFormat: [ TranscoderFormat.PVRTC1_4_RGB, TranscoderFormat.PVRTC1_4_RGBA ],
engineFormat: [ EngineFormat.RGB_PVRTC_4BPPV1_Format, EngineFormat.RGBA_PVRTC_4BPPV1_Format ],
priorityETC1S: 5,
priorityUASTC: 6,
needsPowerOfTwo: true,
},
];
// NOTE(review): .sort() mutates in place, so both constants reference the SAME
// array, sorted by the second comparator (priorityUASTC) — presumably tolerated
// because lookups below filter by basisFormat; confirm before relying on order.
const ETC1S_OPTIONS = FORMAT_OPTIONS.sort( function ( a, b ) {
return a.priorityETC1S - b.priorityETC1S;
} );
const UASTC_OPTIONS = FORMAT_OPTIONS.sort( function ( a, b ) {
return a.priorityUASTC - b.priorityUASTC;
} );
// Picks the first supported (transcoder target, engine format) pair for the
// given source encoding; falls back to uncompressed RGBA32.
function getTranscoderFormat( basisFormat, width, height, hasAlpha ) {
let transcoderFormat;
let engineFormat;
const options = basisFormat === BasisFormat.ETC1S ? ETC1S_OPTIONS : UASTC_OPTIONS;
for ( let i = 0; i < options.length; i ++ ) {
const opt = options[ i ];
if ( ! config[ opt.if ] ) continue;
if ( ! opt.basisFormat.includes( basisFormat ) ) continue;
if ( hasAlpha && opt.transcoderFormat.length < 2 ) continue;
if ( opt.needsPowerOfTwo && ! ( isPowerOfTwo( width ) && isPowerOfTwo( height ) ) ) continue;
transcoderFormat = opt.transcoderFormat[ hasAlpha ? 1 : 0 ];
engineFormat = opt.engineFormat[ hasAlpha ? 1 : 0 ];
return { transcoderFormat, engineFormat };
}
console.warn( 'THREE.KTX2Loader: No suitable compressed texture format found. Decoding to RGBA32.' );
transcoderFormat = TranscoderFormat.RGBA32;
engineFormat = EngineFormat.RGBAFormat;
return { transcoderFormat, engineFormat };
}
function isPowerOfTwo( value ) {
if ( value <= 2 ) return true;
return ( value & ( value - 1 ) ) === 0 && value !== 0;
}
/** Concatenates N byte arrays. */
function concat( arrays ) {
if ( arrays.length === 1 ) return arrays[ 0 ];
let totalByteLength = 0;
for ( let i = 0; i < arrays.length; i ++ ) {
const array = arrays[ i ];
totalByteLength += array.byteLength;
}
const result = new Uint8Array( totalByteLength );
let byteOffset = 0;
for ( let i = 0; i < arrays.length; i ++ ) {
const array = arrays[ i ];
result.set( array, byteOffset );
byteOffset += array.byteLength;
}
return result;
}
};
//
// Parsing for non-Basis textures. These textures may have supercompression
// like Zstd, but they do not require transcoding.
// three.js formats that map to DataTexture/Data3DTexture rather than
// CompressedTexture in createRawTexture().
const UNCOMPRESSED_FORMATS = new Set( [ RGBAFormat, RGFormat, RedFormat ] );
// Vulkan format -> three.js texture format, for raw (non-Basis) KTX2 payloads.
const FORMAT_MAP = {
[ VK_FORMAT_R32G32B32A32_SFLOAT ]: RGBAFormat,
[ VK_FORMAT_R16G16B16A16_SFLOAT ]: RGBAFormat,
[ VK_FORMAT_R8G8B8A8_UNORM ]: RGBAFormat,
[ VK_FORMAT_R8G8B8A8_SRGB ]: RGBAFormat,
[ VK_FORMAT_R32G32_SFLOAT ]: RGFormat,
[ VK_FORMAT_R16G16_SFLOAT ]: RGFormat,
[ VK_FORMAT_R8G8_UNORM ]: RGFormat,
[ VK_FORMAT_R8G8_SRGB ]: RGFormat,
[ VK_FORMAT_R32_SFLOAT ]: RedFormat,
[ VK_FORMAT_R16_SFLOAT ]: RedFormat,
[ VK_FORMAT_R8_SRGB ]: RedFormat,
[ VK_FORMAT_R8_UNORM ]: RedFormat,
[ VK_FORMAT_ASTC_6x6_SRGB_BLOCK ]: RGBA_ASTC_6x6_Format,
[ VK_FORMAT_ASTC_6x6_UNORM_BLOCK ]: RGBA_ASTC_6x6_Format,
};
// Vulkan format -> three.js texel data type, keys parallel to FORMAT_MAP.
const TYPE_MAP = {
[ VK_FORMAT_R32G32B32A32_SFLOAT ]: FloatType,
[ VK_FORMAT_R16G16B16A16_SFLOAT ]: HalfFloatType,
[ VK_FORMAT_R8G8B8A8_UNORM ]: UnsignedByteType,
[ VK_FORMAT_R8G8B8A8_SRGB ]: UnsignedByteType,
[ VK_FORMAT_R32G32_SFLOAT ]: FloatType,
[ VK_FORMAT_R16G16_SFLOAT ]: HalfFloatType,
[ VK_FORMAT_R8G8_UNORM ]: UnsignedByteType,
[ VK_FORMAT_R8G8_SRGB ]: UnsignedByteType,
[ VK_FORMAT_R32_SFLOAT ]: FloatType,
[ VK_FORMAT_R16_SFLOAT ]: HalfFloatType,
[ VK_FORMAT_R8_SRGB ]: UnsignedByteType,
[ VK_FORMAT_R8_UNORM ]: UnsignedByteType,
[ VK_FORMAT_ASTC_6x6_SRGB_BLOCK ]: UnsignedByteType,
[ VK_FORMAT_ASTC_6x6_UNORM_BLOCK ]: UnsignedByteType,
};
/**
 * Builds a texture from a KTX2 container holding raw (non-Basis) pixel data,
 * optionally Zstandard-supercompressed.
 *
 * Fix: the shared ZSTD decoder was created with `new Promise( async ( resolve )
 * => ... )` — the async-executor anti-pattern. If `zstd.init()` rejected, the
 * cached promise never settled and every later load hung. An async IIFE
 * propagates the rejection instead.
 *
 * @param {object} container - Parsed KTX2 container (from ktx-parse `read`).
 * @return {Promise<DataTexture|Data3DTexture|CompressedTexture>}
 */
async function createRawTexture( container ) {

	const { vkFormat } = container;

	if ( FORMAT_MAP[ vkFormat ] === undefined ) {

		throw new Error( 'THREE.KTX2Loader: Unsupported vkFormat.' );

	}

	//

	let zstd;

	if ( container.supercompressionScheme === KHR_SUPERCOMPRESSION_ZSTD ) {

		if ( ! _zstd ) {

			// Lazily create one decoder shared by all loads; init() failures
			// reject the cached promise rather than leaving it pending forever.
			_zstd = ( async () => {

				const decoder = new ZSTDDecoder();
				await decoder.init();
				return decoder;

			} )();

		}

		zstd = await _zstd;

	}

	//

	// One mip entry per level: decompress (if needed) and reinterpret the bytes
	// with the typed-array view matching the texture's Vulkan format.
	const mipmaps = [];

	for ( let levelIndex = 0; levelIndex < container.levels.length; levelIndex ++ ) {

		const levelWidth = Math.max( 1, container.pixelWidth >> levelIndex );
		const levelHeight = Math.max( 1, container.pixelHeight >> levelIndex );
		const levelDepth = container.pixelDepth ? Math.max( 1, container.pixelDepth >> levelIndex ) : 0;

		const level = container.levels[ levelIndex ];

		let levelData;

		if ( container.supercompressionScheme === KHR_SUPERCOMPRESSION_NONE ) {

			levelData = level.levelData;

		} else if ( container.supercompressionScheme === KHR_SUPERCOMPRESSION_ZSTD ) {

			levelData = zstd.decode( level.levelData, level.uncompressedByteLength );

		} else {

			throw new Error( 'THREE.KTX2Loader: Unsupported supercompressionScheme.' );

		}

		let data;

		if ( TYPE_MAP[ vkFormat ] === FloatType ) {

			data = new Float32Array(
				levelData.buffer,
				levelData.byteOffset,
				levelData.byteLength / Float32Array.BYTES_PER_ELEMENT
			);

		} else if ( TYPE_MAP[ vkFormat ] === HalfFloatType ) {

			data = new Uint16Array(
				levelData.buffer,
				levelData.byteOffset,
				levelData.byteLength / Uint16Array.BYTES_PER_ELEMENT
			);

		} else {

			data = levelData;

		}

		mipmaps.push( {
			data: data,
			width: levelWidth,
			height: levelHeight,
			depth: levelDepth,
		} );

	}

	let texture;

	if ( UNCOMPRESSED_FORMATS.has( FORMAT_MAP[ vkFormat ] ) ) {

		texture = container.pixelDepth === 0
			? new DataTexture( mipmaps[ 0 ].data, container.pixelWidth, container.pixelHeight )
			: new Data3DTexture( mipmaps[ 0 ].data, container.pixelWidth, container.pixelHeight, container.pixelDepth );

	} else {

		// Block-compressed raw data (e.g. ASTC 6x6) is only supported in 2D.
		if ( container.pixelDepth > 0 ) throw new Error( 'THREE.KTX2Loader: Unsupported pixelDepth.' );

		texture = new CompressedTexture( mipmaps, container.pixelWidth, container.pixelHeight );

	}

	texture.mipmaps = mipmaps;
	texture.type = TYPE_MAP[ vkFormat ];
	texture.format = FORMAT_MAP[ vkFormat ];
	texture.colorSpace = parseColorSpace( container );
	texture.needsUpdate = true;

	//

	return Promise.resolve( texture );

}
// Maps the KTX2 DFD color primaries + transfer function to a three.js color space.
function parseColorSpace( container ) {

	const dfd = container.dataFormatDescriptor[ 0 ];
	const srgbTransfer = dfd.transferFunction === KHR_DF_TRANSFER_SRGB;

	switch ( dfd.colorPrimaries ) {

		case KHR_DF_PRIMARIES_BT709:
			return srgbTransfer ? SRGBColorSpace : LinearSRGBColorSpace;

		case KHR_DF_PRIMARIES_DISPLAYP3:
			return srgbTransfer ? DisplayP3ColorSpace : LinearDisplayP3ColorSpace;

		case KHR_DF_PRIMARIES_UNSPECIFIED:
			return NoColorSpace;

		default:
			console.warn( `THREE.KTX2Loader: Unsupported color primaries, "${ dfd.colorPrimaries }"` );
			return NoColorSpace;

	}

}
export { KTX2Loader };

View File

@ -0,0 +1,176 @@
import {
CompressedTextureLoader
} from '/static/javascript/three/build/three.module.js';
/**
* for description see https://www.khronos.org/opengles/sdk/tools/KTX/
* for file layout see https://www.khronos.org/opengles/sdk/tools/KTX/file_format_spec/
*
* ported from https://github.com/BabylonJS/Babylon.js/blob/master/src/Misc/khronosTextureContainer.ts
*/
class KTXLoader extends CompressedTextureLoader {

	constructor( manager ) {

		super( manager );

	}

	/**
	 * Parses a KTX 1.1 container into the shape CompressedTextureLoader expects.
	 *
	 * @param {ArrayBuffer} buffer - Raw KTX file contents.
	 * @param {boolean} loadMipmaps - Whether to extract all mip levels or just level 0.
	 */
	parse( buffer, loadMipmaps ) {

		const container = new KhronosTextureContainer( buffer, 1 );

		return {
			mipmaps: container.mipmaps( loadMipmaps ),
			width: container.pixelWidth,
			height: container.pixelHeight,
			format: container.glInternalFormat,
			isCubemap: container.numberOfFaces === 6,
			mipmapCount: container.numberOfMipmapLevels
		};

	}

}
const HEADER_LEN = 12 + ( 13 * 4 ); // identifier + header elements (not including key value meta-data pairs)
// load types — only COMPRESSED_2D is implemented below; the others are kept
// for reference.
const COMPRESSED_2D = 0; // uses a gl.compressedTexImage2D()
//const COMPRESSED_3D = 1; // uses a gl.compressedTexImage3D()
//const TEX_2D = 2; // uses a gl.texImage2D()
//const TEX_3D = 3; // uses a gl.texImage3D()
class KhronosTextureContainer {
/**
* @param {ArrayBuffer} arrayBuffer- contents of the KTX container file
* @param {number} facesExpected- should be either 1 or 6, based whether a cube texture or or
* @param {boolean} threeDExpected- provision for indicating that data should be a 3D texture, not implemented
* @param {boolean} textureArrayExpected- provision for indicating that data should be a texture array, not implemented
*/
constructor( arrayBuffer, facesExpected /*, threeDExpected, textureArrayExpected */ ) {
this.arrayBuffer = arrayBuffer;
// Test that it is a ktx formatted file, based on the first 12 bytes, character representation is:
// '´', 'K', 'T', 'X', ' ', '1', '1', 'ª', '\r', '\n', '\x1A', '\n'
// 0xAB, 0x4B, 0x54, 0x58, 0x20, 0x31, 0x31, 0xBB, 0x0D, 0x0A, 0x1A, 0x0A
const identifier = new Uint8Array( this.arrayBuffer, 0, 12 );
if ( identifier[ 0 ] !== 0xAB ||
identifier[ 1 ] !== 0x4B ||
identifier[ 2 ] !== 0x54 ||
identifier[ 3 ] !== 0x58 ||
identifier[ 4 ] !== 0x20 ||
identifier[ 5 ] !== 0x31 ||
identifier[ 6 ] !== 0x31 ||
identifier[ 7 ] !== 0xBB ||
identifier[ 8 ] !== 0x0D ||
identifier[ 9 ] !== 0x0A ||
identifier[ 10 ] !== 0x1A ||
identifier[ 11 ] !== 0x0A ) {
console.error( 'texture missing KTX identifier' );
return;
}
// load the reset of the header in native 32 bit uint
const dataSize = Uint32Array.BYTES_PER_ELEMENT;
const headerDataView = new DataView( this.arrayBuffer, 12, 13 * dataSize );
const endianness = headerDataView.getUint32( 0, true );
const littleEndian = endianness === 0x04030201;
this.glType = headerDataView.getUint32( 1 * dataSize, littleEndian ); // must be 0 for compressed textures
this.glTypeSize = headerDataView.getUint32( 2 * dataSize, littleEndian ); // must be 1 for compressed textures
this.glFormat = headerDataView.getUint32( 3 * dataSize, littleEndian ); // must be 0 for compressed textures
this.glInternalFormat = headerDataView.getUint32( 4 * dataSize, littleEndian ); // the value of arg passed to gl.compressedTexImage2D(,,x,,,,)
this.glBaseInternalFormat = headerDataView.getUint32( 5 * dataSize, littleEndian ); // specify GL_RGB, GL_RGBA, GL_ALPHA, etc (un-compressed only)
this.pixelWidth = headerDataView.getUint32( 6 * dataSize, littleEndian ); // level 0 value of arg passed to gl.compressedTexImage2D(,,,x,,,)
this.pixelHeight = headerDataView.getUint32( 7 * dataSize, littleEndian ); // level 0 value of arg passed to gl.compressedTexImage2D(,,,,x,,)
this.pixelDepth = headerDataView.getUint32( 8 * dataSize, littleEndian ); // level 0 value of arg passed to gl.compressedTexImage3D(,,,,,x,,)
this.numberOfArrayElements = headerDataView.getUint32( 9 * dataSize, littleEndian ); // used for texture arrays
this.numberOfFaces = headerDataView.getUint32( 10 * dataSize, littleEndian ); // used for cubemap textures, should either be 1 or 6
this.numberOfMipmapLevels = headerDataView.getUint32( 11 * dataSize, littleEndian ); // number of levels; disregard possibility of 0 for compressed textures
this.bytesOfKeyValueData = headerDataView.getUint32( 12 * dataSize, littleEndian ); // the amount of space after the header for meta-data
// Make sure we have a compressed type. Not only reduces work, but probably better to let dev know they are not compressing.
if ( this.glType !== 0 ) {
console.warn( 'only compressed formats currently supported' );
return;
} else {
// value of zero is an indication to generate mipmaps @ runtime. Not usually allowed for compressed, so disregard.
this.numberOfMipmapLevels = Math.max( 1, this.numberOfMipmapLevels );
}
if ( this.pixelHeight === 0 || this.pixelDepth !== 0 ) {
console.warn( 'only 2D textures currently supported' );
return;
}
if ( this.numberOfArrayElements !== 0 ) {
console.warn( 'texture arrays not currently supported' );
return;
}
if ( this.numberOfFaces !== facesExpected ) {
console.warn( 'number of faces expected' + facesExpected + ', but found ' + this.numberOfFaces );
return;
}
// we now have a completely validated file, so could use existence of loadType as success
// would need to make this more elaborate & adjust checks above to support more than one load type
this.loadType = COMPRESSED_2D;
}
/**
 * Builds the array of mipmap descriptors from the KTX payload.
 *
 * @param {boolean} loadMipmaps - When true, all numberOfMipmapLevels levels
 * are extracted; otherwise only the base level is returned.
 * @returns {Array<{data: Uint8Array, width: number, height: number}>} One
 * entry per level and face; each `data` is a view into this.arrayBuffer
 * (no bytes are copied).
 */
mipmaps( loadMipmaps ) {
const mipmaps = [];
// initialize width & height for level 1
let dataOffset = HEADER_LEN + this.bytesOfKeyValueData;
let width = this.pixelWidth;
let height = this.pixelHeight;
const mipmapCount = loadMipmaps ? this.numberOfMipmapLevels : 1;
for ( let level = 0; level < mipmapCount; level ++ ) {
// Each level begins with a 32 bit byte-length field.
const imageSize = new Int32Array( this.arrayBuffer, dataOffset, 1 )[ 0 ]; // size per face, since not supporting array cubemaps
dataOffset += 4; // size of the image + 4 for the imageSize field
for ( let face = 0; face < this.numberOfFaces; face ++ ) {
const byteArray = new Uint8Array( this.arrayBuffer, dataOffset, imageSize );
mipmaps.push( { 'data': byteArray, 'width': width, 'height': height } );
dataOffset += imageSize;
// Advance 0-3 extra bytes so the next face starts on a 4 byte boundary.
dataOffset += 3 - ( ( imageSize + 3 ) % 4 ); // add padding for odd sized image
}
// Each successive level is half the size, clamped to at least one pixel.
width = Math.max( 1.0, width * 0.5 );
height = Math.max( 1.0, height * 0.5 );
}
return mipmaps;
}
}
export { KTXLoader };

View File

@ -0,0 +1,58 @@
/**
* Kaleidoscope Shader
* Radial reflection around center point
* Ported from: http://pixelshaders.com/editor/
* by Toby Schachman / http://tobyschachman.com/
*
* sides: number of reflections
* angle: initial angle in radians
*/
/**
 * Shader definition consumed by ShaderPass / ShaderMaterial.
 *
 * Uniforms:
 *   tDiffuse - input render target texture
 *   sides    - number of radial reflections
 *   angle    - initial rotation in radians
 */
const KaleidoShader = {
name: 'KaleidoShader',
uniforms: {
'tDiffuse': { value: null },
'sides': { value: 6.0 },
'angle': { value: 0.0 }
},
// Pass-through vertex shader: forwards the UVs unchanged.
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
// Fragment shader: converts the UV to polar coordinates around the image
// center, folds the angle into one mirrored wedge of width tau/sides, and
// samples the input texture at the reflected position.
fragmentShader: /* glsl */`
uniform sampler2D tDiffuse;
uniform float sides;
uniform float angle;
varying vec2 vUv;
void main() {
vec2 p = vUv - 0.5;
float r = length(p);
float a = atan(p.y, p.x) + angle;
float tau = 2. * 3.1416 ;
a = mod(a, tau/sides);
a = abs(a - tau/sides/2.) ;
p = r * vec2(cos(a), sin(a));
vec4 color = texture2D(tDiffuse, p + 0.5);
gl_FragColor = color;
}`
};
export { KaleidoShader };

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,202 @@
import {
BufferAttribute,
BufferGeometry,
Group,
LineSegments,
Matrix3,
Mesh
} from '/static/javascript/three/build/three.module.js';
import { mergeGeometries } from './BufferGeometryUtils.js';
/**
 * Utilities for post-processing object trees produced by LDrawLoader.
 */
class LDrawUtils {

	/**
	 * Merges geometries in `object` by material and returns a new object.
	 * Use on non-indexed geometries only. The buffers of the returned object
	 * reference the buffers of the input object (no vertex data is copied).
	 * Conditional lines generated by LDrawLoader receive special treatment:
	 * their control point and direction attributes are transformed along
	 * with the positions.
	 *
	 * @param {Object3D} object - Root of the hierarchy to merge; its world
	 * matrices are updated but it is otherwise left intact.
	 * @returns {Group} New group containing one Mesh/LineSegments per
	 * distinct material.
	 */
	static mergeObject( object ) {

		// Extracts a group from a geometry as a new geometry whose attribute
		// buffers are subarray views of the original buffers.
		function extractGroup( geometry, group, elementSize, isConditionalLine ) {

			const newGeometry = new BufferGeometry();
			const originalPositions = geometry.getAttribute( 'position' ).array;
			const originalNormals = elementSize === 3 ? geometry.getAttribute( 'normal' ).array : null;

			// Clamp the group range so it never reads past the buffer end.
			const numVertsGroup = Math.min( group.count, Math.floor( originalPositions.length / 3 ) - group.start );
			const vertStart = group.start * 3;
			const vertEnd = ( group.start + numVertsGroup ) * 3;

			const positions = originalPositions.subarray( vertStart, vertEnd );
			const normals = originalNormals !== null ? originalNormals.subarray( vertStart, vertEnd ) : null;

			newGeometry.setAttribute( 'position', new BufferAttribute( positions, 3 ) );
			if ( normals !== null ) newGeometry.setAttribute( 'normal', new BufferAttribute( normals, 3 ) );

			if ( isConditionalLine ) {

				// Conditional lines carry extra per-vertex attributes that
				// must be sliced with the same vertex range.
				const controlArray0 = geometry.getAttribute( 'control0' ).array.subarray( vertStart, vertEnd );
				const controlArray1 = geometry.getAttribute( 'control1' ).array.subarray( vertStart, vertEnd );
				const directionArray = geometry.getAttribute( 'direction' ).array.subarray( vertStart, vertEnd );

				newGeometry.setAttribute( 'control0', new BufferAttribute( controlArray0, 3, false ) );
				newGeometry.setAttribute( 'control1', new BufferAttribute( controlArray1, 3, false ) );
				newGeometry.setAttribute( 'direction', new BufferAttribute( directionArray, 3, false ) );

			}

			return newGeometry;

		}

		// Registers `geometry` under its material's uuid in `geometries`.
		function addGeometry( mat, geometry, geometries ) {

			const geoms = geometries[ mat.uuid ];
			if ( ! geoms ) {

				geometries[ mat.uuid ] = {
					mat: mat,
					arr: [ geometry ]
				};

			} else {

				geoms.arr.push( geometry );

			}

		}

		// Permutes the first two vertices of each element (triangle or line
		// segment) in place; used to restore winding after a mirroring
		// transform.
		function permuteAttribute( attribute, elemSize ) {

			if ( ! attribute ) return;

			const verts = attribute.array;
			const numVerts = Math.floor( verts.length / 3 );
			let offset = 0;
			for ( let i = 0; i < numVerts; i ++ ) {

				const x = verts[ offset ];
				const y = verts[ offset + 1 ];
				const z = verts[ offset + 2 ];

				verts[ offset ] = verts[ offset + 3 ];
				verts[ offset + 1 ] = verts[ offset + 4 ];
				verts[ offset + 2 ] = verts[ offset + 5 ];

				verts[ offset + 3 ] = x;
				verts[ offset + 4 ] = y;
				verts[ offset + 5 ] = z;

				offset += elemSize * 3;

			}

		}

		// Traverse the object hierarchy collecting geometries, bucketed by
		// kind (mesh / lines / conditional lines) and material, transforming
		// them to world space.
		const meshGeometries = {};
		const linesGeometries = {};
		const condLinesGeometries = {};

		object.updateMatrixWorld( true );
		const normalMatrix = new Matrix3();

		object.traverse( c => {

			// Note: logical || (the original used bitwise |, which happened to
			// work only because the operands coerce to 0/1).
			if ( c.isMesh || c.isLineSegments ) {

				const elemSize = c.isMesh ? 3 : 2;
				const geometry = c.geometry.clone();

				// A negative determinant means the world transform mirrors the
				// geometry, so vertex order must be flipped to preserve facing.
				const matrixIsInverted = c.matrixWorld.determinant() < 0;
				if ( matrixIsInverted ) {

					permuteAttribute( geometry.attributes.position, elemSize );
					permuteAttribute( geometry.attributes.normal, elemSize );

				}

				geometry.applyMatrix4( c.matrixWorld );

				if ( c.isConditionalLine ) {

					geometry.attributes.control0.applyMatrix4( c.matrixWorld );
					geometry.attributes.control1.applyMatrix4( c.matrixWorld );
					normalMatrix.getNormalMatrix( c.matrixWorld );
					geometry.attributes.direction.applyNormalMatrix( normalMatrix );

				}

				const geometries = c.isMesh ? meshGeometries : ( c.isConditionalLine ? condLinesGeometries : linesGeometries );

				if ( Array.isArray( c.material ) ) {

					// Multi-material object: split the geometry per group so
					// each piece lands in its own material bucket.
					for ( const group of geometry.groups ) {

						const mat = c.material[ group.materialIndex ];
						const newGeometry = extractGroup( geometry, group, elemSize, c.isConditionalLine );
						addGeometry( mat, newGeometry, geometries );

					}

				} else {

					addGeometry( c.material, geometry, geometries );

				}

			}

		} );

		// Create one merged renderable per material bucket.
		const mergedObject = new Group();

		const meshMaterialsIds = Object.keys( meshGeometries );
		for ( const meshMaterialsId of meshMaterialsIds ) {

			const meshGeometry = meshGeometries[ meshMaterialsId ];
			const mergedGeometry = mergeGeometries( meshGeometry.arr );
			mergedObject.add( new Mesh( mergedGeometry, meshGeometry.mat ) );

		}

		const linesMaterialsIds = Object.keys( linesGeometries );
		for ( const linesMaterialsId of linesMaterialsIds ) {

			const lineGeometry = linesGeometries[ linesMaterialsId ];
			const mergedGeometry = mergeGeometries( lineGeometry.arr );
			mergedObject.add( new LineSegments( mergedGeometry, lineGeometry.mat ) );

		}

		const condLinesMaterialsIds = Object.keys( condLinesGeometries );
		for ( const condLinesMaterialsId of condLinesMaterialsIds ) {

			const condLineGeometry = condLinesGeometries[ condLinesMaterialsId ];
			const mergedGeometry = mergeGeometries( condLineGeometry.arr );
			const condLines = new LineSegments( mergedGeometry, condLineGeometry.mat );
			condLines.isConditionalLine = true;
			mergedObject.add( condLines );

		}

		mergedObject.userData.constructionStep = 0;
		mergedObject.userData.numConstructionSteps = 1;

		return mergedObject;

	}

}
export { LDrawUtils };

View File

@ -0,0 +1,168 @@
// http://download.autodesk.com/us/systemdocs/help/2011/lustre/index.html?url=./files/WSc4e151a45a3b785a24c3d9a411df9298473-7ffd.htm,topicNumber=d0e9492
// https://community.foundry.com/discuss/topic/103636/format-spec-for-3dl?mode=Post&postID=895258
import {
ClampToEdgeWrapping,
Data3DTexture,
FileLoader,
FloatType,
LinearFilter,
Loader,
RGBAFormat,
UnsignedByteType,
} from '/static/javascript/three/build/three.module.js';
/**
 * Loader for the .3dl LUT format (Autodesk Lustre / Foundry 3D lookup
 * tables). The parsed result is `{ size, texture3D }` where `texture3D`
 * is a Data3DTexture suitable for LUTPass.
 */
export class LUT3dlLoader extends Loader {

	constructor( manager ) {

		super( manager );

		// Texel type of the generated texture; see setType().
		this.type = UnsignedByteType;

	}

	/**
	 * Sets the texel data type of the resulting texture.
	 *
	 * @param {number} type - UnsignedByteType or FloatType.
	 * @returns {this}
	 * @throws {Error} For any other type constant.
	 */
	setType( type ) {

		const isSupported = ( type === UnsignedByteType ) || ( type === FloatType );

		if ( isSupported === false ) {

			throw new Error( 'LUT3dlLoader: Unsupported type' );

		}

		this.type = type;

		return this;

	}

	/**
	 * Loads and parses the .3dl file at `url`.
	 */
	load( url, onLoad, onProgress, onError ) {

		const fileLoader = new FileLoader( this.manager );
		fileLoader.setPath( this.path );
		fileLoader.setResponseType( 'text' );

		fileLoader.load( url, ( text ) => {

			try {

				const result = this.parse( text );
				onLoad( result );

			} catch ( e ) {

				if ( onError ) {

					onError( e );

				} else {

					console.error( e );

				}

				this.manager.itemError( url );

			}

		}, onProgress, onError );

	}

	/**
	 * Parses the textual content of a .3dl file.
	 *
	 * @param {string} input - File content.
	 * @returns {{size: number, texture3D: Data3DTexture}}
	 * @throws {Error} When the grid line is missing or unevenly spaced.
	 */
	parse( input ) {

		const regExpGridInfo = /^[\d ]+$/m;
		const regExpDataPoints = /^([\d.e+-]+) +([\d.e+-]+) +([\d.e+-]+) *$/gm;

		// The first all-numeric line describes the positions of values on the
		// LUT grid.
		const gridMatch = regExpGridInfo.exec( input );

		if ( gridMatch === null ) {

			throw new Error( 'LUT3dlLoader: Missing grid information' );

		}

		const gridLines = gridMatch[ 0 ].trim().split( /\s+/g ).map( Number );
		const gridStep = gridLines[ 1 ] - gridLines[ 0 ];
		const size = gridLines.length;
		const sizeSq = size ** 2;

		// Grid positions must be evenly spaced.
		for ( let i = 1; i < gridLines.length; ++ i ) {

			if ( gridLines[ i ] - gridLines[ i - 1 ] !== gridStep ) {

				throw new Error( 'LUT3dlLoader: Inconsistent grid size' );

			}

		}

		const dataFloat = new Float32Array( size ** 3 * 4 );

		let maxValue = 0.0;
		let index = 0;
		let match;

		while ( ( match = regExpDataPoints.exec( input ) ) !== null ) {

			const r = Number( match[ 1 ] );
			const g = Number( match[ 2 ] );
			const b = Number( match[ 3 ] );

			maxValue = Math.max( maxValue, r, g, b );

			// In the file b grows first, then g, then r; remap into an
			// r-fastest layout for the 3D texture.
			const bLayer = index % size;
			const gLayer = Math.floor( index / size ) % size;
			const rLayer = Math.floor( index / sizeSq ) % size;

			const d4 = ( bLayer * sizeSq + gLayer * size + rLayer ) * 4;
			dataFloat[ d4 + 0 ] = r;
			dataFloat[ d4 + 1 ] = g;
			dataFloat[ d4 + 2 ] = b;

			++ index;

		}

		// Derive the apparent bit depth from the largest value so the data
		// can be scaled into [0.0, 1.0].
		const bits = Math.ceil( Math.log2( maxValue ) );
		const maxBitValue = Math.pow( 2, bits );

		const useBytes = ( this.type === UnsignedByteType );
		const data = useBytes ? new Uint8Array( dataFloat.length ) : dataFloat;
		const scale = useBytes ? 255 : 1;

		for ( let i = 0; i < data.length; i += 4 ) {

			// Note: data aliases dataFloat when the type is FloatType.
			data[ i + 0 ] = dataFloat[ i + 0 ] / maxBitValue * scale;
			data[ i + 1 ] = dataFloat[ i + 1 ] / maxBitValue * scale;
			data[ i + 2 ] = dataFloat[ i + 2 ] / maxBitValue * scale;
			data[ i + 3 ] = scale;

		}

		const texture3D = new Data3DTexture();
		texture3D.image.data = data;
		texture3D.image.width = size;
		texture3D.image.height = size;
		texture3D.image.depth = size;
		texture3D.format = RGBAFormat;
		texture3D.type = this.type;
		texture3D.magFilter = LinearFilter;
		texture3D.minFilter = LinearFilter;
		texture3D.wrapS = ClampToEdgeWrapping;
		texture3D.wrapT = ClampToEdgeWrapping;
		texture3D.wrapR = ClampToEdgeWrapping;
		texture3D.generateMipmaps = false;
		texture3D.needsUpdate = true;

		return {
			size,
			texture3D,
		};

	}

}

View File

@ -0,0 +1,153 @@
// https://wwwimages2.adobe.com/content/dam/acom/en/products/speedgrade/cc/pdfs/cube-lut-specification-1.0.pdf
import {
ClampToEdgeWrapping,
Data3DTexture,
FileLoader,
FloatType,
LinearFilter,
Loader,
UnsignedByteType,
Vector3,
} from '/static/javascript/three/build/three.module.js';
/**
 * Loader for the Adobe .cube LUT format.
 * The parsed result is `{ title, size, domainMin, domainMax, texture3D }`.
 */
export class LUTCubeLoader extends Loader {

	constructor( manager ) {

		super( manager );

		// Texel type of the generated texture; see setType().
		this.type = UnsignedByteType;

	}

	/**
	 * Sets the texel data type of the resulting texture.
	 *
	 * @param {number} type - UnsignedByteType or FloatType.
	 * @returns {this}
	 * @throws {Error} For any other type constant.
	 */
	setType( type ) {

		const isSupported = ( type === UnsignedByteType ) || ( type === FloatType );

		if ( isSupported === false ) {

			throw new Error( 'LUTCubeLoader: Unsupported type' );

		}

		this.type = type;

		return this;

	}

	/**
	 * Loads and parses the .cube file at `url`.
	 */
	load( url, onLoad, onProgress, onError ) {

		const fileLoader = new FileLoader( this.manager );
		fileLoader.setPath( this.path );
		fileLoader.setResponseType( 'text' );

		fileLoader.load( url, ( text ) => {

			try {

				const result = this.parse( text );
				onLoad( result );

			} catch ( e ) {

				if ( onError ) {

					onError( e );

				} else {

					console.error( e );

				}

				this.manager.itemError( url );

			}

		}, onProgress, onError );

	}

	/**
	 * Parses the textual content of a .cube file.
	 *
	 * @param {string} input - File content.
	 * @returns {{title: ?string, size: number, domainMin: Vector3, domainMax: Vector3, texture3D: Data3DTexture}}
	 * @throws {Error} When LUT_3D_SIZE is missing or the domain is inverted.
	 */
	parse( input ) {

		const regExpTitle = /TITLE +"([^"]*)"/;
		const regExpSize = /LUT_3D_SIZE +(\d+)/;
		const regExpDomainMin = /DOMAIN_MIN +([\d.]+) +([\d.]+) +([\d.]+)/;
		const regExpDomainMax = /DOMAIN_MAX +([\d.]+) +([\d.]+) +([\d.]+)/;
		const regExpDataPoints = /^([\d.e+-]+) +([\d.e+-]+) +([\d.e+-]+) *$/gm;

		const titleMatch = regExpTitle.exec( input );
		const title = ( titleMatch === null ) ? null : titleMatch[ 1 ];

		const sizeMatch = regExpSize.exec( input );

		if ( sizeMatch === null ) {

			throw new Error( 'LUTCubeLoader: Missing LUT_3D_SIZE information' );

		}

		const size = Number( sizeMatch[ 1 ] );
		const useBytes = ( this.type === UnsignedByteType );
		const data = useBytes ? new Uint8Array( size ** 3 * 4 ) : new Float32Array( size ** 3 * 4 );

		// Spec defaults when DOMAIN_MIN / DOMAIN_MAX are absent.
		const domainMin = new Vector3( 0, 0, 0 );
		const domainMax = new Vector3( 1, 1, 1 );

		const minMatch = regExpDomainMin.exec( input );

		if ( minMatch !== null ) {

			domainMin.set( Number( minMatch[ 1 ] ), Number( minMatch[ 2 ] ), Number( minMatch[ 3 ] ) );

		}

		const maxMatch = regExpDomainMax.exec( input );

		if ( maxMatch !== null ) {

			domainMax.set( Number( maxMatch[ 1 ] ), Number( maxMatch[ 2 ] ), Number( maxMatch[ 3 ] ) );

		}

		if ( domainMin.x > domainMax.x || domainMin.y > domainMax.y || domainMin.z > domainMax.z ) {

			throw new Error( 'LUTCubeLoader: Invalid input domain' );

		}

		// Data lines are written sequentially into the texel array; alpha is
		// filled with full opacity (in the chosen scale).
		const scale = useBytes ? 255 : 1;
		let writeIndex = 0;
		let match;

		while ( ( match = regExpDataPoints.exec( input ) ) !== null ) {

			data[ writeIndex ++ ] = Number( match[ 1 ] ) * scale;
			data[ writeIndex ++ ] = Number( match[ 2 ] ) * scale;
			data[ writeIndex ++ ] = Number( match[ 3 ] ) * scale;
			data[ writeIndex ++ ] = scale;

		}

		const texture3D = new Data3DTexture();
		texture3D.image.data = data;
		texture3D.image.width = size;
		texture3D.image.height = size;
		texture3D.image.depth = size;
		texture3D.type = this.type;
		texture3D.magFilter = LinearFilter;
		texture3D.minFilter = LinearFilter;
		texture3D.wrapS = ClampToEdgeWrapping;
		texture3D.wrapT = ClampToEdgeWrapping;
		texture3D.wrapR = ClampToEdgeWrapping;
		texture3D.generateMipmaps = false;
		texture3D.needsUpdate = true;

		return {
			title,
			size,
			domainMin,
			domainMax,
			texture3D,
		};

	}

}

View File

@ -0,0 +1,149 @@
import {
Loader,
TextureLoader,
Data3DTexture,
RGBAFormat,
UnsignedByteType,
ClampToEdgeWrapping,
LinearFilter,
} from '/static/javascript/three/build/three.module.js';
/**
 * Loads a LUT stored as a 2D image strip (as used by Unreal, or by Unity
 * URP "Color Lookup" post-processing) and converts it to a Data3DTexture.
 */
export class LUTImageLoader extends Loader {
/**
 * @param {boolean} [flipVertical=false] - Pass true for strips authored for
 * a Unity URP pipeline (green at the top); false matches Unreal's layout.
 */
constructor( flipVertical = false ) {
//The NeutralLUT.png has green at the bottom for Unreal and green at the top for Unity URP Color Lookup
//post-processing. If you're using lut image strips from a Unity pipeline then pass true to the constructor
super();
this.flip = flipVertical;
}
/**
 * Loads and parses the LUT image at `url`. The LUT size is taken as the
 * shorter image edge; a wider-than-tall image is treated as a horizontal
 * strip and rearranged into a vertical one first.
 */
load( url, onLoad, onProgress, onError ) {
const loader = new TextureLoader( this.manager );
loader.setCrossOrigin( this.crossOrigin );
loader.setPath( this.path );
loader.load( url, texture => {
try {
let imageData;
if ( texture.image.width < texture.image.height ) {
imageData = this.getImageData( texture );
} else {
imageData = this.horz2Vert( texture );
}
onLoad( this.parse( imageData.data, Math.min( texture.image.width, texture.image.height ) ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
this.manager.itemError( url );
}
}, onProgress, onError );
}
// Draws the texture into an offscreen canvas (optionally flipped
// vertically) and returns its raw RGBA pixel data.
getImageData( texture ) {
const width = texture.image.width;
const height = texture.image.height;
const canvas = document.createElement( 'canvas' );
canvas.width = width;
canvas.height = height;
const context = canvas.getContext( '2d' );
if ( this.flip === true ) {
// Mirror the drawing vertically around the canvas center.
context.scale( 1, - 1 );
context.translate( 0, - height );
}
context.drawImage( texture.image, 0, 0 );
return context.getImageData( 0, 0, width, height );
}
// Rearranges a horizontal strip of square tiles into a vertical strip.
// Note the swapped dimensions: `width` is the tile size (image height).
horz2Vert( texture ) {
const width = texture.image.height;
const height = texture.image.width;
const canvas = document.createElement( 'canvas' );
canvas.width = width;
canvas.height = height;
const context = canvas.getContext( '2d' );
if ( this.flip === true ) {
context.scale( 1, - 1 );
context.translate( 0, - height );
}
for ( let i = 0; i < width; i ++ ) {
// NOTE(review): despite its name, `sy` is passed as the source x offset
// of the i-th tile in the 9-argument drawImage call below.
const sy = i * width;
// NOTE(review): with flip set, dy starts at `height` (off-canvas) for
// i = 0 — looks suspicious; confirm against a flipped horizontal strip.
const dy = ( this.flip ) ? height - i * width : i * width;
context.drawImage( texture.image, sy, 0, width, width, 0, dy, width, width );
}
return context.getImageData( 0, 0, width, height );
}
// Wraps the raw RGBA bytes in a Data3DTexture of size^3 texels.
parse( dataArray, size ) {
const data = new Uint8Array( dataArray );
const texture3D = new Data3DTexture();
texture3D.image.data = data;
texture3D.image.width = size;
texture3D.image.height = size;
texture3D.image.depth = size;
texture3D.format = RGBAFormat;
texture3D.type = UnsignedByteType;
texture3D.magFilter = LinearFilter;
texture3D.minFilter = LinearFilter;
texture3D.wrapS = ClampToEdgeWrapping;
texture3D.wrapT = ClampToEdgeWrapping;
texture3D.wrapR = ClampToEdgeWrapping;
texture3D.generateMipmaps = false;
texture3D.needsUpdate = true;
return {
size,
texture3D,
};
}
}

View File

@ -0,0 +1,108 @@
import { ShaderPass } from './ShaderPass.js';
/**
 * Shader definition for LUTPass: remaps the colors of the input render
 * through a 3D lookup table, blended with the original by `intensity`.
 *
 * Uniforms:
 *   lut       - sampler3D lookup table
 *   lutSize   - edge length of the LUT texture in texels
 *   tDiffuse  - input render target texture
 *   intensity - 0 = original color, 1 = fully remapped
 */
const LUTShader = {
name: 'LUTShader',
uniforms: {
lut: { value: null },
lutSize: { value: 0 },
tDiffuse: { value: null },
intensity: { value: 1.0 },
},
// Pass-through vertex shader: forwards the UVs unchanged.
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
`,
// Fragment shader: samples the input, shrinks the color into the LUT's
// texel centers, performs the 3D lookup and mixes by intensity.
fragmentShader: /* glsl */`
uniform float lutSize;
uniform sampler3D lut;
varying vec2 vUv;
uniform float intensity;
uniform sampler2D tDiffuse;
void main() {
vec4 val = texture2D( tDiffuse, vUv );
vec4 lutVal;
// pull the sample in by half a pixel so the sample begins
// at the center of the edge pixels.
float pixelWidth = 1.0 / lutSize;
float halfPixelWidth = 0.5 / lutSize;
vec3 uvw = vec3( halfPixelWidth ) + val.rgb * ( 1.0 - pixelWidth );
lutVal = vec4( texture( lut, uvw ).rgb, val.a );
gl_FragColor = vec4( mix( val, lutVal, intensity ) );
}
`,
};
/**
 * Post-processing pass that color grades the rendered image through a 3D
 * lookup table using LUTShader.
 */
class LUTPass extends ShaderPass {

	/**
	 * @param {Object} [options]
	 * @param {?Data3DTexture} [options.lut] - Lookup table to apply.
	 * @param {number} [options.intensity=1] - Blend factor between the
	 * original color (0) and the fully remapped color (1).
	 */
	constructor( options = {} ) {

		super( LUTShader );
		this.lut = options.lut || null;
		this.intensity = 'intensity' in options ? options.intensity : 1;

	}

	// Assigning a new LUT updates both the sampler and the size uniform.
	set lut( v ) {

		if ( v === this.lut ) return;

		const uniforms = this.material.uniforms;
		uniforms.lut.value = null;

		if ( v ) {

			uniforms.lutSize.value = v.image.width;
			uniforms.lut.value = v;

		}

	}

	get lut() {

		return this.material.uniforms.lut.value;

	}

	set intensity( v ) {

		this.material.uniforms.intensity.value = v;

	}

	get intensity() {

		return this.material.uniforms.intensity.value;

	}

}
export { LUTPass };

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,606 @@
import {
DataUtils,
DataTextureLoader,
FloatType,
HalfFloatType,
RGBAFormat
} from '/static/javascript/three/build/three.module.js';
/**
 * Loader for LogLuv-encoded TIFF textures, backed by the bundled UTIF
 * decoder below.
 */
class LogLuvLoader extends DataTextureLoader {

	constructor( manager ) {

		super( manager );

		// Output texel type; can be changed via setDataType().
		this.type = HalfFloatType;

	}

	/**
	 * Parses a LogLuv TIFF buffer into a texture data description.
	 *
	 * @param {ArrayBuffer} buffer - Raw TIFF file contents.
	 * @returns {{width: number, height: number, data: (Uint16Array|Float32Array), format: number, type: number, flipY: boolean}}
	 */
	parse( buffer ) {

		const pages = UTIF.decode( buffer );
		const firstPage = pages[ 0 ];

		UTIF.decodeImage( buffer, firstPage );

		const rgba = UTIF.toRGBA( firstPage, this.type );

		return {
			width: firstPage.width,
			height: firstPage.height,
			data: rgba,
			format: RGBAFormat,
			type: this.type,
			flipY: true
		};

	}

	/**
	 * Sets the output texel type (HalfFloatType or FloatType).
	 *
	 * @returns {this}
	 */
	setDataType( value ) {

		this.type = value;

		return this;

	}

}
// from https://github.com/photopea/UTIF.js (MIT License)
const UTIF = {};
/**
 * Decodes the IFD (image file directory) chain of a TIFF buffer. Pixel
 * data is not decoded here; see UTIF.decodeImage.
 *
 * @param {ArrayBuffer} buff - Raw TIFF file contents.
 * @param {Object} [prm] - Options: parseMN parses MakerNote sub-IFDs,
 * debug enables console logging.
 * @returns {Array<Object>} One tag map ('t<tag>' keys) per IFD.
 */
UTIF.decode = function ( buff, prm ) {
if ( prm == null ) prm = { parseMN: true, debug: false }; // read MakerNote, debug
var data = new Uint8Array( buff ), offset = 0;
// Byte-order mark: 'II' = little endian, 'MM' = big endian.
var id = UTIF._binBE.readASCII( data, offset, 2 ); offset += 2;
var bin = id == 'II' ? UTIF._binLE : UTIF._binBE;
bin.readUshort( data, offset ); offset += 2;
// Offset of the first IFD (read from byte 4 of the header).
var ifdo = bin.readUint( data, offset );
var ifds = [];
while ( true ) {
// Sanity-check the entry count and the first entry's field type before
// parsing; valid TIFF field types are 1..13.
var cnt = bin.readUshort( data, ifdo ), typ = bin.readUshort( data, ifdo + 4 ); if ( cnt != 0 ) if ( typ < 1 || 13 < typ ) {
console.log( 'error in TIFF' ); break;
}
UTIF._readIFD( bin, data, ifdo, ifds, 0, prm );
// Each IFD ends with the offset of the next one; 0 terminates the chain.
ifdo = bin.readUint( data, ifdo + 2 + cnt * 12 );
if ( ifdo == 0 ) break;
}
return ifds;
};
/**
 * Decodes the pixel data of a single IFD (`img`) from `buff` into img.data.
 * Reads the standard TIFF layout tags — t256/t257 width/height, t259
 * compression, t258 bits per sample, t277 samples per pixel, t273/t279
 * strip offsets/byte counts, t322/t323 tile size, t278 rows per strip —
 * and decompresses each strip or tile via UTIF.decode._decompress.
 * No-op when the IFD has no width tag or was already decoded.
 */
UTIF.decodeImage = function ( buff, img, ifds ) {
if ( img.data ) return;
var data = new Uint8Array( buff );
var id = UTIF._binBE.readASCII( data, 0, 2 );
if ( img[ 't256' ] == null ) return; // No width => probably not an image
img.isLE = id == 'II';
img.width = img[ 't256' ][ 0 ]; //delete img["t256"];
img.height = img[ 't257' ][ 0 ]; //delete img["t257"];
var cmpr = img[ 't259' ] ? img[ 't259' ][ 0 ] : 1; //delete img["t259"];
var fo = img[ 't266' ] ? img[ 't266' ][ 0 ] : 1; //delete img["t266"];
if ( img[ 't284' ] && img[ 't284' ][ 0 ] == 2 ) console.log( 'PlanarConfiguration 2 should not be used!' );
if ( cmpr == 7 && img[ 't258' ] && img[ 't258' ].length > 3 ) img[ 't258' ] = img[ 't258' ].slice( 0, 3 );
var bipp; // bits per pixel
if ( img[ 't258' ] ) bipp = Math.min( 32, img[ 't258' ][ 0 ] ) * img[ 't258' ].length;
else bipp = ( img[ 't277' ] ? img[ 't277' ][ 0 ] : 1 );
// Some .NEF files have t258==14, even though they use 16 bits per pixel
if ( cmpr == 1 && img[ 't279' ] != null && img[ 't278' ] && img[ 't262' ][ 0 ] == 32803 ) {
bipp = Math.round( ( img[ 't279' ][ 0 ] * 8 ) / ( img.width * img[ 't278' ][ 0 ] ) );
}
// Bits per line, rounded up to a whole number of bytes.
var bipl = Math.ceil( img.width * bipp / 8 ) * 8;
// Strip (t273) or tile (t324) data offsets; byte counts from t279/t325.
var soff = img[ 't273' ]; if ( soff == null ) soff = img[ 't324' ];
var bcnt = img[ 't279' ]; if ( cmpr == 1 && soff.length == 1 ) bcnt = [ img.height * ( bipl >>> 3 ) ]; if ( bcnt == null ) bcnt = img[ 't325' ];
//bcnt[0] = Math.min(bcnt[0], data.length); // Hasselblad, "RAW_HASSELBLAD_H3D39II.3FR"
var bytes = new Uint8Array( img.height * ( bipl >>> 3 ) ), bilen = 0;
if ( img[ 't322' ] != null ) {
// Tiled layout: decompress each tile into a scratch buffer, then blit it
// into place.
var tw = img[ 't322' ][ 0 ], th = img[ 't323' ][ 0 ];
var tx = Math.floor( ( img.width + tw - 1 ) / tw );
var ty = Math.floor( ( img.height + th - 1 ) / th );
var tbuff = new Uint8Array( Math.ceil( tw * th * bipp / 8 ) | 0 );
for ( var y = 0; y < ty; y ++ )
for ( var x = 0; x < tx; x ++ ) {
var i = y * tx + x; for ( var j = 0; j < tbuff.length; j ++ ) tbuff[ j ] = 0;
UTIF.decode._decompress( img, ifds, data, soff[ i ], bcnt[ i ], cmpr, tbuff, 0, fo );
// Might be required for 7 too. Need to check
if ( cmpr == 6 ) bytes = tbuff;
else UTIF._copyTile( tbuff, Math.ceil( tw * bipp / 8 ) | 0, th, bytes, Math.ceil( img.width * bipp / 8 ) | 0, img.height, Math.ceil( x * tw * bipp / 8 ) | 0, y * th );
}
bilen = bytes.length * 8;
} else {
// Stripped layout: decompress strips sequentially into `bytes`.
var rps = img[ 't278' ] ? img[ 't278' ][ 0 ] : img.height; rps = Math.min( rps, img.height );
for ( var i = 0; i < soff.length; i ++ ) {
UTIF.decode._decompress( img, ifds, data, soff[ i ], bcnt[ i ], cmpr, bytes, Math.ceil( bilen / 8 ) | 0, fo );
bilen += bipl * rps;
}
bilen = Math.min( bilen, bytes.length * 8 );
}
img.data = new Uint8Array( bytes.buffer, 0, Math.ceil( bilen / 8 ) | 0 );
};
/**
 * Decompresses one strip/tile into `tgt` at byte offset `toff`, then
 * applies shared post-processing: an in-place byte swap for 16 bit
 * big-endian samples and the horizontal differencing predictor (tag
 * t317 == 2). This trimmed copy only supports compression 34676
 * (SGILog / LogLuv); callers pass a 9th fill-order argument that this
 * signature deliberately ignores.
 */
UTIF.decode._decompress = function ( img, ifds, data, off, len, cmpr, tgt, toff ) {
//console.log("compression", cmpr);
//var time = Date.now();
if ( cmpr == 34676 ) UTIF.decode._decodeLogLuv32( img, data, off, len, tgt, toff );
else console.log( 'Unsupported compression', cmpr );
//console.log(Date.now()-time);
var bps = ( img[ 't258' ] ? Math.min( 32, img[ 't258' ][ 0 ] ) : 1 );
var noc = ( img[ 't277' ] ? img[ 't277' ][ 0 ] : 1 ), bpp = ( bps * noc ) >>> 3, h = ( img[ 't278' ] ? img[ 't278' ][ 0 ] : img.height ), bpl = Math.ceil( bps * noc * img.width / 8 );
// convert to Little Endian /*
if ( bps == 16 && ! img.isLE && img[ 't33422' ] == null ) // not DNG
for ( var y = 0; y < h; y ++ ) {
//console.log("fixing endianity");
var roff = toff + y * bpl;
// Swap the two bytes of every 16 bit sample in this row.
for ( var x = 1; x < bpl; x += 2 ) {
var t = tgt[ roff + x ]; tgt[ roff + x ] = tgt[ roff + x - 1 ]; tgt[ roff + x - 1 ] = t;
}
} //*/
if ( img[ 't317' ] && img[ 't317' ][ 0 ] == 2 ) {
// Predictor 2: each sample is stored as a delta from the sample one
// pixel to the left; undo it by running sums along each row.
for ( var y = 0; y < h; y ++ ) {
var ntoff = toff + y * bpl;
if ( bps == 16 ) for ( var j = bpp; j < bpl; j += 2 ) {
var nv = ( ( tgt[ ntoff + j + 1 ] << 8 ) | tgt[ ntoff + j ] ) + ( ( tgt[ ntoff + j - bpp + 1 ] << 8 ) | tgt[ ntoff + j - bpp ] );
tgt[ ntoff + j ] = nv & 255; tgt[ ntoff + j + 1 ] = ( nv >>> 8 ) & 255;
}
else if ( noc == 3 ) for ( var j = 3; j < bpl; j += 3 ) {
tgt[ ntoff + j ] = ( tgt[ ntoff + j ] + tgt[ ntoff + j - 3 ] ) & 255;
tgt[ ntoff + j + 1 ] = ( tgt[ ntoff + j + 1 ] + tgt[ ntoff + j - 2 ] ) & 255;
tgt[ ntoff + j + 2 ] = ( tgt[ ntoff + j + 2 ] + tgt[ ntoff + j - 1 ] ) & 255;
}
else for ( var j = bpp; j < bpl; j ++ ) tgt[ ntoff + j ] = ( tgt[ ntoff + j ] + tgt[ ntoff + j - bpp ] ) & 255;
}
}
};
/**
 * Decodes SGILog-RLE compressed 32 bit LogLuv scanlines. Each scanline is
 * stored as four run-length encoded byte planes of `width` bytes; a
 * control byte < 128 means "copy that many literal bytes", >= 128 means
 * "repeat the next byte (control - 126) times". The decoded planes are
 * interleaved into `tgt` at 6 bytes per pixel.
 * NOTE(review): only byte offsets 0, 1, 2 and 4 of each 6 byte pixel are
 * written here; presumably the 16 bit byte swap in _decompress moves the
 * u/v bytes to where toRGBA reads them — confirm with a big-endian sample.
 */
UTIF.decode._decodeLogLuv32 = function ( img, data, off, len, tgt, toff ) {
var w = img.width, qw = w * 4;
var io = 0, out = new Uint8Array( qw );
while ( io < len ) {
var oo = 0;
while ( oo < qw ) {
var c = data[ off + io ]; io ++;
if ( c < 128 ) {
// Literal run: copy c bytes verbatim.
for ( var j = 0; j < c; j ++ ) out[ oo + j ] = data[ off + io + j ]; oo += c; io += c;
} else {
// Repeat run: replicate one byte (c - 126) times.
c = c - 126; for ( var j = 0; j < c; j ++ ) out[ oo + j ] = data[ off + io ]; oo += c; io ++;
}
}
// Interleave the four planes into the 6-bytes-per-pixel target layout.
for ( var x = 0; x < w; x ++ ) {
tgt[ toff + 0 ] = out[ x ];
tgt[ toff + 1 ] = out[ x + w ];
tgt[ toff + 2 ] = out[ x + w * 2 ];
tgt[ toff + 4 ] = out[ x + w * 3 ];
toff += 6;
}
}
};
// Tag-number -> name map; left empty in this trimmed copy (only used for
// debug logging in _readIFD).
UTIF.tags = {};
//UTIF.ttypes = { 256:3,257:3,258:3, 259:3, 262:3, 273:4, 274:3, 277:3,278:4,279:4, 282:5, 283:5, 284:3, 286:5,287:5, 296:3, 305:2, 306:2, 338:3, 513:4, 514:4, 34665:4 };
// start at tag 250
// Lookup tables mapping TIFF tag number -> expected field type.
// `basic.main` is indexed directly by tag number (tags 0-249 are zero; the
// literal below continues from tag 250), `basic.rest` covers sparse
// high-numbered tags, and `gps` covers the GPS sub-IFD tags.
UTIF._types = function () {
var main = new Array( 250 ); main.fill( 0 );
main = main.concat( [ 0, 0, 0, 0, 4, 3, 3, 3, 3, 3, 0, 0, 3, 0, 0, 0, 3, 0, 0, 2, 2, 2, 2, 4, 3, 0, 0, 3, 4, 4, 3, 3, 5, 5, 3, 2, 5, 5, 0, 0, 0, 0, 4, 4, 0, 0, 3, 3, 0, 0, 0, 0, 0, 0, 0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 3, 5, 5, 3, 0, 3, 3, 4, 4, 4, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ] );
var rest = { 33432: 2, 33434: 5, 33437: 5, 34665: 4, 34850: 3, 34853: 4, 34855: 3, 34864: 3, 34866: 4, 36864: 7, 36867: 2, 36868: 2, 37121: 7, 37377: 10, 37378: 5, 37380: 10, 37381: 5, 37383: 3, 37384: 3, 37385: 3, 37386: 5, 37510: 7, 37520: 2, 37521: 2, 37522: 2, 40960: 7, 40961: 3, 40962: 4, 40963: 4, 40965: 4, 41486: 5, 41487: 5, 41488: 3, 41985: 3, 41986: 3, 41987: 3, 41988: 5, 41989: 3, 41990: 3, 41993: 3, 41994: 3, 41995: 7, 41996: 3, 42032: 2, 42033: 2, 42034: 5, 42036: 2, 42037: 2, 59932: 7 };
return {
basic: {
main: main,
rest: rest
},
gps: {
main: [ 1, 2, 5, 2, 5, 1, 5, 5, 0, 9 ],
rest: { 18: 2, 29: 2 }
}
};
}();
/**
 * Reads a single IFD at `offset` and appends its tag map to `ifds`.
 * Each of the `cnt` 12 byte entries holds: tag (2 bytes), field type
 * (2 bytes), value count (4 bytes) and the value itself or an offset to
 * it (4 bytes; values of 4 bytes or less are stored inline). Values are
 * stored on the result object under 't<tag>' keys. Known sub-IFD tags
 * (330 subIFD, 34665 EXIF, 34853 GPS, 50740 DNG private, 61440 Fuji) and
 * MakerNotes (37500, when prm.parseMN) are parsed recursively.
 *
 * @returns {number|undefined} Offset just past the entry table, or
 * undefined when the very first entry has an unknown field type.
 */
UTIF._readIFD = function ( bin, data, offset, ifds, depth, prm ) {
var cnt = bin.readUshort( data, offset ); offset += 2;
var ifd = {};
if ( prm.debug ) console.log( ' '.repeat( depth ), ifds.length - 1, '>>>----------------' );
for ( var i = 0; i < cnt; i ++ ) {
var tag = bin.readUshort( data, offset ); offset += 2;
var type = bin.readUshort( data, offset ); offset += 2;
var num = bin.readUint( data, offset ); offset += 4;
var voff = bin.readUint( data, offset ); offset += 4;
var arr = [];
//ifd["t"+tag+"-"+UTIF.tags[tag]] = arr;
// Type 1 = BYTE, 7 = UNDEFINED: raw bytes.
if ( type == 1 || type == 7 ) {
arr = new Uint8Array( data.buffer, ( num < 5 ? offset - 4 : voff ), num );
}
// Type 2 = ASCII (NUL-terminated string).
if ( type == 2 ) {
var o0 = ( num < 5 ? offset - 4 : voff ), c = data[ o0 ], len = Math.max( 0, Math.min( num - 1, data.length - o0 ) );
if ( c < 128 || len == 0 ) arr.push( bin.readASCII( data, o0, len ) );
else arr = new Uint8Array( data.buffer, o0, len );
}
// Type 3 = SHORT (uint16).
if ( type == 3 ) {
for ( var j = 0; j < num; j ++ ) arr.push( bin.readUshort( data, ( num < 3 ? offset - 4 : voff ) + 2 * j ) );
}
// Type 4 = LONG (uint32); 13 = IFD offset.
if ( type == 4
|| type == 13 ) {
for ( var j = 0; j < num; j ++ ) arr.push( bin.readUint( data, ( num < 2 ? offset - 4 : voff ) + 4 * j ) );
}
// Type 5 = RATIONAL, 10 = SRATIONAL: stored as [numerator, denominator].
if ( type == 5 || type == 10 ) {
var ri = type == 5 ? bin.readUint : bin.readInt;
for ( var j = 0; j < num; j ++ ) arr.push( [ ri( data, voff + j * 8 ), ri( data, voff + j * 8 + 4 ) ] );
}
// Type 8 = SSHORT (int16).
if ( type == 8 ) {
for ( var j = 0; j < num; j ++ ) arr.push( bin.readShort( data, ( num < 3 ? offset - 4 : voff ) + 2 * j ) );
}
// Type 9 = SLONG (int32).
if ( type == 9 ) {
for ( var j = 0; j < num; j ++ ) arr.push( bin.readInt( data, ( num < 2 ? offset - 4 : voff ) + 4 * j ) );
}
// Type 11 = FLOAT.
if ( type == 11 ) {
for ( var j = 0; j < num; j ++ ) arr.push( bin.readFloat( data, voff + j * 4 ) );
}
// Type 12 = DOUBLE.
if ( type == 12 ) {
for ( var j = 0; j < num; j ++ ) arr.push( bin.readDouble( data, voff + j * 8 ) );
}
if ( num != 0 && arr.length == 0 ) {
console.log( tag, 'unknown TIFF tag type: ', type, 'num:', num ); if ( i == 0 ) return; continue;
}
if ( prm.debug ) console.log( ' '.repeat( depth ), tag, type, UTIF.tags[ tag ], arr );
ifd[ 't' + tag ] = arr;
// Recurse into known sub-IFD tags.
if ( tag == 330 || tag == 34665 || tag == 34853 || ( tag == 50740 && bin.readUshort( data, bin.readUint( arr, 0 ) ) < 300 ) || tag == 61440 ) {
var oarr = tag == 50740 ? [ bin.readUint( arr, 0 ) ] : arr;
var subfd = [];
for ( var j = 0; j < oarr.length; j ++ ) UTIF._readIFD( bin, data, oarr[ j ], subfd, depth + 1, prm );
if ( tag == 330 ) ifd.subIFD = subfd;
if ( tag == 34665 ) ifd.exifIFD = subfd[ 0 ];
if ( tag == 34853 ) ifd.gpsiIFD = subfd[ 0 ]; //console.log("gps", subfd[0]); }
if ( tag == 50740 ) ifd.dngPrvt = subfd[ 0 ];
if ( tag == 61440 ) ifd.fujiIFD = subfd[ 0 ];
}
// MakerNote: either an embedded TIFF (Nikon) or a bare sub-IFD.
if ( tag == 37500 && prm.parseMN ) {
var mn = arr;
//console.log(bin.readASCII(mn,0,mn.length), mn);
if ( bin.readASCII( mn, 0, 5 ) == 'Nikon' ) ifd.makerNote = UTIF[ 'decode' ]( mn.slice( 10 ).buffer )[ 0 ];
else if ( bin.readUshort( data, voff ) < 300 && bin.readUshort( data, voff + 4 ) <= 12 ) {
var subsub = []; UTIF._readIFD( bin, data, voff, subsub, depth + 1, prm );
ifd.makerNote = subsub[ 0 ];
}
}
}
ifds.push( ifd );
if ( prm.debug ) console.log( ' '.repeat( depth ), '<<<---------------' );
return offset;
};
/**
 * Converts decoded image data to interleaved RGBA in the requested type.
 * Only photometric interpretation 32845 (SGI LogLuv, tag t262) is
 * supported here; each pixel's 16 bit log-encoded L and 8 bit u/v are
 * converted Luv -> xyY -> XYZ -> linear RGB. Alpha is set to 1.
 *
 * @param {Object} out - Decoded IFD with width, height and data.
 * @param {number} type - HalfFloatType or FloatType.
 * @returns {Uint16Array|Float32Array} RGBA pixel array (4 values/pixel).
 * @throws {Error} For unsupported types or photometric interpretations.
 */
UTIF.toRGBA = function ( out, type ) {
const w = out.width, h = out.height, area = w * h, data = out.data;
let img;
switch ( type ) {
case HalfFloatType:
img = new Uint16Array( area * 4 );
break;
case FloatType:
img = new Float32Array( area * 4 );
break;
default:
throw new Error( 'THREE.LogLuvLoader: Unsupported texture data type: ' + type );
}
let intp = out[ 't262' ] ? out[ 't262' ][ 0 ] : 2;
const bps = out[ 't258' ] ? Math.min( 32, out[ 't258' ][ 0 ] ) : 1;
if ( out[ 't262' ] == null && bps == 1 ) intp = 0;
if ( intp == 32845 ) {
for ( let y = 0; y < h; y ++ ) {
for ( let x = 0; x < w; x ++ ) {
// 6 bytes per source pixel, 4 values per destination pixel.
const si = ( y * w + x ) * 6, qi = ( y * w + x ) * 4;
let L = ( data[ si + 1 ] << 8 ) | data[ si ];
// Decode the 16 bit log-luminance value.
L = Math.pow( 2, ( L + 0.5 ) / 256 - 64 );
const u = ( data[ si + 3 ] + 0.5 ) / 410;
const v = ( data[ si + 5 ] + 0.5 ) / 410;
// Luv to xyY
const sX = ( 9 * u ) / ( 6 * u - 16 * v + 12 );
const sY = ( 4 * v ) / ( 6 * u - 16 * v + 12 );
const bY = L;
// xyY to XYZ
const X = ( sX * bY ) / sY, Y = bY, Z = ( 1 - sX - sY ) * bY / sY;
// XYZ to linear RGB
const r = 2.690 * X - 1.276 * Y - 0.414 * Z;
const g = - 1.022 * X + 1.978 * Y + 0.044 * Z;
const b = 0.061 * X - 0.224 * Y + 1.163 * Z;
if ( type === HalfFloatType ) {
// Clamp to the largest representable half-float value.
img[ qi ] = DataUtils.toHalfFloat( Math.min( r, 65504 ) );
img[ qi + 1 ] = DataUtils.toHalfFloat( Math.min( g, 65504 ) );
img[ qi + 2 ] = DataUtils.toHalfFloat( Math.min( b, 65504 ) );
img[ qi + 3 ] = DataUtils.toHalfFloat( 1 );
} else {
img[ qi ] = r;
img[ qi + 1 ] = g;
img[ qi + 2 ] = b;
img[ qi + 3 ] = 1;
}
}
}
} else {
throw new Error( 'THREE.LogLuvLoader: Unsupported Photometric interpretation: ' + intp );
}
return img;
};
// Big-endian binary readers/writers. Multi-byte values are assembled through
// the shared typed-array views declared below (ui8/i16/i32/ui32/fl32/fl64),
// which all alias one 8-byte scratch buffer.
// NOTE(review): the view-punning reads assume a little-endian host; the shared
// scratch buffer also makes these helpers non-reentrant — confirm acceptable.
UTIF._binBE =
{
// Returns the offset of the next zero byte at or after o (C-string terminator scan).
nextZero: function ( data, o ) {
while ( data[ o ] != 0 ) o ++; return o;
},
// Unsigned 16-bit, big-endian.
readUshort: function ( buff, p ) {
return ( buff[ p ] << 8 ) | buff[ p + 1 ];
},
// Signed 16-bit: bytes are reversed into the scratch buffer, then reinterpreted via the i16 view.
readShort: function ( buff, p ) {
var a = UTIF._binBE.ui8; a[ 0 ] = buff[ p + 1 ]; a[ 1 ] = buff[ p + 0 ]; return UTIF._binBE.i16[ 0 ];
},
// Signed 32-bit, big-endian.
readInt: function ( buff, p ) {
var a = UTIF._binBE.ui8; a[ 0 ] = buff[ p + 3 ]; a[ 1 ] = buff[ p + 2 ]; a[ 2 ] = buff[ p + 1 ]; a[ 3 ] = buff[ p + 0 ]; return UTIF._binBE.i32[ 0 ];
},
// Unsigned 32-bit, big-endian.
readUint: function ( buff, p ) {
var a = UTIF._binBE.ui8; a[ 0 ] = buff[ p + 3 ]; a[ 1 ] = buff[ p + 2 ]; a[ 2 ] = buff[ p + 1 ]; a[ 3 ] = buff[ p + 0 ]; return UTIF._binBE.ui32[ 0 ];
},
// Reads l bytes as an ASCII string (no terminator handling).
readASCII: function ( buff, p, l ) {
var s = ''; for ( var i = 0; i < l; i ++ ) s += String.fromCharCode( buff[ p + i ] ); return s;
},
// 32-bit IEEE float, big-endian.
readFloat: function ( buff, p ) {
var a = UTIF._binBE.ui8; for ( var i = 0; i < 4; i ++ ) a[ i ] = buff[ p + 3 - i ]; return UTIF._binBE.fl32[ 0 ];
},
// 64-bit IEEE double, big-endian.
readDouble: function ( buff, p ) {
var a = UTIF._binBE.ui8; for ( var i = 0; i < 8; i ++ ) a[ i ] = buff[ p + 7 - i ]; return UTIF._binBE.fl64[ 0 ];
},
// The writers below mirror the readers, emitting big-endian byte order.
writeUshort: function ( buff, p, n ) {
buff[ p ] = ( n >> 8 ) & 255; buff[ p + 1 ] = n & 255;
},
writeInt: function ( buff, p, n ) {
var a = UTIF._binBE.ui8; UTIF._binBE.i32[ 0 ] = n; buff[ p + 3 ] = a[ 0 ]; buff[ p + 2 ] = a[ 1 ]; buff[ p + 1 ] = a[ 2 ]; buff[ p + 0 ] = a[ 3 ];
},
writeUint: function ( buff, p, n ) {
buff[ p ] = ( n >> 24 ) & 255; buff[ p + 1 ] = ( n >> 16 ) & 255; buff[ p + 2 ] = ( n >> 8 ) & 255; buff[ p + 3 ] = ( n >> 0 ) & 255;
},
writeASCII: function ( buff, p, s ) {
for ( var i = 0; i < s.length; i ++ ) buff[ p + i ] = s.charCodeAt( i );
},
writeDouble: function ( buff, p, n ) {
UTIF._binBE.fl64[ 0 ] = n;
for ( var i = 0; i < 8; i ++ ) buff[ p + i ] = UTIF._binBE.ui8[ 7 - i ];
}
};
// Shared 8-byte scratch buffer and the aliased views used by the converters above.
UTIF._binBE.ui8 = new Uint8Array( 8 );
UTIF._binBE.i16 = new Int16Array( UTIF._binBE.ui8.buffer );
UTIF._binBE.i32 = new Int32Array( UTIF._binBE.ui8.buffer );
UTIF._binBE.ui32 = new Uint32Array( UTIF._binBE.ui8.buffer );
UTIF._binBE.fl32 = new Float32Array( UTIF._binBE.ui8.buffer );
UTIF._binBE.fl64 = new Float64Array( UTIF._binBE.ui8.buffer );
// Little-endian counterparts of UTIF._binBE. Byte-order-agnostic helpers
// (nextZero, readASCII, writeASCII) are shared directly; the numeric readers
// reuse _binBE's 8-byte scratch buffer and views, so the same host-endianness
// and reentrancy caveats apply.
UTIF._binLE =
{
nextZero: UTIF._binBE.nextZero,
// Unsigned 16-bit, little-endian.
readUshort: function ( buff, p ) {
return ( buff[ p + 1 ] << 8 ) | buff[ p ];
},
// Signed 16-bit via the shared typed-array views.
readShort: function ( buff, p ) {
var a = UTIF._binBE.ui8; a[ 0 ] = buff[ p + 0 ]; a[ 1 ] = buff[ p + 1 ]; return UTIF._binBE.i16[ 0 ];
},
// Signed 32-bit, little-endian.
readInt: function ( buff, p ) {
var a = UTIF._binBE.ui8; a[ 0 ] = buff[ p + 0 ]; a[ 1 ] = buff[ p + 1 ]; a[ 2 ] = buff[ p + 2 ]; a[ 3 ] = buff[ p + 3 ]; return UTIF._binBE.i32[ 0 ];
},
// Unsigned 32-bit, little-endian.
readUint: function ( buff, p ) {
var a = UTIF._binBE.ui8; a[ 0 ] = buff[ p + 0 ]; a[ 1 ] = buff[ p + 1 ]; a[ 2 ] = buff[ p + 2 ]; a[ 3 ] = buff[ p + 3 ]; return UTIF._binBE.ui32[ 0 ];
},
readASCII: UTIF._binBE.readASCII,
// 32-bit IEEE float, little-endian.
readFloat: function ( buff, p ) {
var a = UTIF._binBE.ui8; for ( var i = 0; i < 4; i ++ ) a[ i ] = buff[ p + i ]; return UTIF._binBE.fl32[ 0 ];
},
// 64-bit IEEE double, little-endian.
readDouble: function ( buff, p ) {
var a = UTIF._binBE.ui8; for ( var i = 0; i < 8; i ++ ) a[ i ] = buff[ p + i ]; return UTIF._binBE.fl64[ 0 ];
},
// Writers emit little-endian byte order.
writeUshort: function ( buff, p, n ) {
buff[ p ] = ( n ) & 255; buff[ p + 1 ] = ( n >> 8 ) & 255;
},
writeInt: function ( buff, p, n ) {
var a = UTIF._binBE.ui8; UTIF._binBE.i32[ 0 ] = n; buff[ p + 0 ] = a[ 0 ]; buff[ p + 1 ] = a[ 1 ]; buff[ p + 2 ] = a[ 2 ]; buff[ p + 3 ] = a[ 3 ];
},
writeUint: function ( buff, p, n ) {
buff[ p ] = ( n >>> 0 ) & 255; buff[ p + 1 ] = ( n >>> 8 ) & 255; buff[ p + 2 ] = ( n >>> 16 ) & 255; buff[ p + 3 ] = ( n >>> 24 ) & 255;
},
writeASCII: UTIF._binBE.writeASCII
};
/**
 * Copies one decoded tile into the full image buffer at the given offset,
 * clipping the tile against the right and bottom image edges.
 *
 * @param {TypedArray} tb - Tile buffer (source).
 * @param {number} tw - Tile width.
 * @param {number} th - Tile height.
 * @param {TypedArray} b - Image buffer (destination).
 * @param {number} w - Image width.
 * @param {number} h - Image height.
 * @param {number} xoff - Tile x offset inside the image.
 * @param {number} yoff - Tile y offset inside the image.
 */
UTIF._copyTile = function ( tb, tw, th, b, w, h, xoff, yoff ) {

	// Portion of the tile that actually falls inside the image.
	const cols = Math.min( tw, w - xoff );
	const rows = Math.min( th, h - yoff );

	for ( let row = 0; row < rows; row ++ ) {

		let dst = ( yoff + row ) * w + xoff;
		let src = row * tw;

		for ( let col = 0; col < cols; col ++ ) {

			b[ dst ] = tb[ src ];
			dst ++;
			src ++;

		}

	}

};
export { LogLuvLoader };

View File

@ -0,0 +1,77 @@
import {
FileLoader,
Loader,
CanvasTexture,
NearestFilter,
SRGBColorSpace
} from '/static/javascript/three/build/three.module.js';
import lottie from '../libs/lottie_canvas.module.js';
/**
 * Loads a Lottie (Bodymovin) JSON animation and renders it into a CanvasTexture
 * via the bundled lottie canvas renderer. Requires a DOM (document / body).
 */
class LottieLoader extends Loader {
// Sets the device pixel ratio handed to the lottie renderer (texture sharpness).
setQuality( value ) {
this._quality = value;
}
/**
 * Starts loading. The CanvasTexture is returned immediately and updated as
 * animation frames are rendered.
 *
 * @param {String} url - URL of the Lottie JSON file.
 * @param {Function} [onLoad] - Called with the texture once parsing is done.
 * @param {Function} [onProgress] - Download progress callback.
 * @param {Function} [onError] - Download error callback.
 * @returns {CanvasTexture}
 */
load( url, onLoad, onProgress, onError ) {
const quality = this._quality || 1;
const texture = new CanvasTexture();
texture.minFilter = NearestFilter;
texture.colorSpace = SRGBColorSpace;
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setWithCredentials( this.withCredentials );
loader.load( url, function ( text ) {
const data = JSON.parse( text );
// lottie uses container.offsetWidth and offsetHeight
// to define width/height, so a sized element must be in the document
const container = document.createElement( 'div' );
container.style.width = data.w + 'px';
container.style.height = data.h + 'px';
document.body.appendChild( container );
const animation = lottie.loadAnimation( {
container: container,
animType: 'canvas',
loop: true,
autoplay: true,
animationData: data,
rendererSettings: { dpr: quality }
} );
texture.animation = animation;
texture.image = animation.container;
// Repaint the texture every time lottie renders a frame.
animation.addEventListener( 'enterFrame', function () {
texture.needsUpdate = true;
} );
// Keep the container in the DOM (lottie renders into it) but hide it.
container.style.display = 'none';
if ( onLoad !== undefined ) {
onLoad( texture );
}
}, onProgress, onError );
return texture;
}
}
export { LottieLoader };

View File

@ -0,0 +1,64 @@
import {
Color
} from '/static/javascript/three/build/three.module.js';
/**
* Luminosity
* http://en.wikipedia.org/wiki/Luminosity
*/
// Passes pixels whose luminance exceeds luminosityThreshold (with a smoothstep
// transition band of smoothWidth); rejected pixels fade to defaultColor /
// defaultOpacity. Typically used as the bright-pass of a bloom effect.
// NOTE(review): the fragment shader calls luminance() without including
// <common>; presumably the function is available when the program is built —
// confirm against the renderer / pass that compiles this shader.
const LuminosityHighPassShader = {
name: 'LuminosityHighPassShader',
shaderID: 'luminosityHighPass',
uniforms: {
'tDiffuse': { value: null }, // input render target
'luminosityThreshold': { value: 1.0 }, // luminance at which pixels start to pass
'smoothWidth': { value: 1.0 }, // width of the smoothstep transition band
'defaultColor': { value: new Color( 0x000000 ) }, // color for rejected pixels
'defaultOpacity': { value: 0.0 } // opacity for rejected pixels
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform sampler2D tDiffuse;
uniform vec3 defaultColor;
uniform float defaultOpacity;
uniform float luminosityThreshold;
uniform float smoothWidth;
varying vec2 vUv;
void main() {
vec4 texel = texture2D( tDiffuse, vUv );
float v = luminance( texel.xyz );
vec4 outputColor = vec4( defaultColor.rgb, defaultOpacity );
float alpha = smoothstep( luminosityThreshold, luminosityThreshold + smoothWidth, v );
gl_FragColor = mix( outputColor, texel, alpha );
}`
};
export { LuminosityHighPassShader };

View File

@ -0,0 +1,48 @@
/**
* Luminosity
* http://en.wikipedia.org/wiki/Luminosity
*/
// Converts the input to grayscale using the luminance() helper from the
// three.js <common> shader chunk; alpha is passed through unchanged.
const LuminosityShader = {
name: 'LuminosityShader',
uniforms: {
'tDiffuse': { value: null } // input render target
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
#include <common>
uniform sampler2D tDiffuse;
varying vec2 vUv;
void main() {
vec4 texel = texture2D( tDiffuse, vUv );
float l = luminance( texel.rgb );
gl_FragColor = vec4( l, l, l, texel.w );
}`
};
export { LuminosityShader };

View File

@ -0,0 +1,399 @@
import {
AnimationClip,
BufferGeometry,
FileLoader,
Float32BufferAttribute,
Loader,
Vector3
} from '/static/javascript/three/build/three.module.js';
// Quantized normal table from Quake II (anorms.h): an MD2 vertex stores its
// normal as a single byte index into these 162 precomputed unit vectors
// (two vectors per line below).
const _normalData = [
[ - 0.525731, 0.000000, 0.850651 ], [ - 0.442863, 0.238856, 0.864188 ],
[ - 0.295242, 0.000000, 0.955423 ], [ - 0.309017, 0.500000, 0.809017 ],
[ - 0.162460, 0.262866, 0.951056 ], [ 0.000000, 0.000000, 1.000000 ],
[ 0.000000, 0.850651, 0.525731 ], [ - 0.147621, 0.716567, 0.681718 ],
[ 0.147621, 0.716567, 0.681718 ], [ 0.000000, 0.525731, 0.850651 ],
[ 0.309017, 0.500000, 0.809017 ], [ 0.525731, 0.000000, 0.850651 ],
[ 0.295242, 0.000000, 0.955423 ], [ 0.442863, 0.238856, 0.864188 ],
[ 0.162460, 0.262866, 0.951056 ], [ - 0.681718, 0.147621, 0.716567 ],
[ - 0.809017, 0.309017, 0.500000 ], [ - 0.587785, 0.425325, 0.688191 ],
[ - 0.850651, 0.525731, 0.000000 ], [ - 0.864188, 0.442863, 0.238856 ],
[ - 0.716567, 0.681718, 0.147621 ], [ - 0.688191, 0.587785, 0.425325 ],
[ - 0.500000, 0.809017, 0.309017 ], [ - 0.238856, 0.864188, 0.442863 ],
[ - 0.425325, 0.688191, 0.587785 ], [ - 0.716567, 0.681718, - 0.147621 ],
[ - 0.500000, 0.809017, - 0.309017 ], [ - 0.525731, 0.850651, 0.000000 ],
[ 0.000000, 0.850651, - 0.525731 ], [ - 0.238856, 0.864188, - 0.442863 ],
[ 0.000000, 0.955423, - 0.295242 ], [ - 0.262866, 0.951056, - 0.162460 ],
[ 0.000000, 1.000000, 0.000000 ], [ 0.000000, 0.955423, 0.295242 ],
[ - 0.262866, 0.951056, 0.162460 ], [ 0.238856, 0.864188, 0.442863 ],
[ 0.262866, 0.951056, 0.162460 ], [ 0.500000, 0.809017, 0.309017 ],
[ 0.238856, 0.864188, - 0.442863 ], [ 0.262866, 0.951056, - 0.162460 ],
[ 0.500000, 0.809017, - 0.309017 ], [ 0.850651, 0.525731, 0.000000 ],
[ 0.716567, 0.681718, 0.147621 ], [ 0.716567, 0.681718, - 0.147621 ],
[ 0.525731, 0.850651, 0.000000 ], [ 0.425325, 0.688191, 0.587785 ],
[ 0.864188, 0.442863, 0.238856 ], [ 0.688191, 0.587785, 0.425325 ],
[ 0.809017, 0.309017, 0.500000 ], [ 0.681718, 0.147621, 0.716567 ],
[ 0.587785, 0.425325, 0.688191 ], [ 0.955423, 0.295242, 0.000000 ],
[ 1.000000, 0.000000, 0.000000 ], [ 0.951056, 0.162460, 0.262866 ],
[ 0.850651, - 0.525731, 0.000000 ], [ 0.955423, - 0.295242, 0.000000 ],
[ 0.864188, - 0.442863, 0.238856 ], [ 0.951056, - 0.162460, 0.262866 ],
[ 0.809017, - 0.309017, 0.500000 ], [ 0.681718, - 0.147621, 0.716567 ],
[ 0.850651, 0.000000, 0.525731 ], [ 0.864188, 0.442863, - 0.238856 ],
[ 0.809017, 0.309017, - 0.500000 ], [ 0.951056, 0.162460, - 0.262866 ],
[ 0.525731, 0.000000, - 0.850651 ], [ 0.681718, 0.147621, - 0.716567 ],
[ 0.681718, - 0.147621, - 0.716567 ], [ 0.850651, 0.000000, - 0.525731 ],
[ 0.809017, - 0.309017, - 0.500000 ], [ 0.864188, - 0.442863, - 0.238856 ],
[ 0.951056, - 0.162460, - 0.262866 ], [ 0.147621, 0.716567, - 0.681718 ],
[ 0.309017, 0.500000, - 0.809017 ], [ 0.425325, 0.688191, - 0.587785 ],
[ 0.442863, 0.238856, - 0.864188 ], [ 0.587785, 0.425325, - 0.688191 ],
[ 0.688191, 0.587785, - 0.425325 ], [ - 0.147621, 0.716567, - 0.681718 ],
[ - 0.309017, 0.500000, - 0.809017 ], [ 0.000000, 0.525731, - 0.850651 ],
[ - 0.525731, 0.000000, - 0.850651 ], [ - 0.442863, 0.238856, - 0.864188 ],
[ - 0.295242, 0.000000, - 0.955423 ], [ - 0.162460, 0.262866, - 0.951056 ],
[ 0.000000, 0.000000, - 1.000000 ], [ 0.295242, 0.000000, - 0.955423 ],
[ 0.162460, 0.262866, - 0.951056 ], [ - 0.442863, - 0.238856, - 0.864188 ],
[ - 0.309017, - 0.500000, - 0.809017 ], [ - 0.162460, - 0.262866, - 0.951056 ],
[ 0.000000, - 0.850651, - 0.525731 ], [ - 0.147621, - 0.716567, - 0.681718 ],
[ 0.147621, - 0.716567, - 0.681718 ], [ 0.000000, - 0.525731, - 0.850651 ],
[ 0.309017, - 0.500000, - 0.809017 ], [ 0.442863, - 0.238856, - 0.864188 ],
[ 0.162460, - 0.262866, - 0.951056 ], [ 0.238856, - 0.864188, - 0.442863 ],
[ 0.500000, - 0.809017, - 0.309017 ], [ 0.425325, - 0.688191, - 0.587785 ],
[ 0.716567, - 0.681718, - 0.147621 ], [ 0.688191, - 0.587785, - 0.425325 ],
[ 0.587785, - 0.425325, - 0.688191 ], [ 0.000000, - 0.955423, - 0.295242 ],
[ 0.000000, - 1.000000, 0.000000 ], [ 0.262866, - 0.951056, - 0.162460 ],
[ 0.000000, - 0.850651, 0.525731 ], [ 0.000000, - 0.955423, 0.295242 ],
[ 0.238856, - 0.864188, 0.442863 ], [ 0.262866, - 0.951056, 0.162460 ],
[ 0.500000, - 0.809017, 0.309017 ], [ 0.716567, - 0.681718, 0.147621 ],
[ 0.525731, - 0.850651, 0.000000 ], [ - 0.238856, - 0.864188, - 0.442863 ],
[ - 0.500000, - 0.809017, - 0.309017 ], [ - 0.262866, - 0.951056, - 0.162460 ],
[ - 0.850651, - 0.525731, 0.000000 ], [ - 0.716567, - 0.681718, - 0.147621 ],
[ - 0.716567, - 0.681718, 0.147621 ], [ - 0.525731, - 0.850651, 0.000000 ],
[ - 0.500000, - 0.809017, 0.309017 ], [ - 0.238856, - 0.864188, 0.442863 ],
[ - 0.262866, - 0.951056, 0.162460 ], [ - 0.864188, - 0.442863, 0.238856 ],
[ - 0.809017, - 0.309017, 0.500000 ], [ - 0.688191, - 0.587785, 0.425325 ],
[ - 0.681718, - 0.147621, 0.716567 ], [ - 0.442863, - 0.238856, 0.864188 ],
[ - 0.587785, - 0.425325, 0.688191 ], [ - 0.309017, - 0.500000, 0.809017 ],
[ - 0.147621, - 0.716567, 0.681718 ], [ - 0.425325, - 0.688191, 0.587785 ],
[ - 0.162460, - 0.262866, 0.951056 ], [ 0.442863, - 0.238856, 0.864188 ],
[ 0.162460, - 0.262866, 0.951056 ], [ 0.309017, - 0.500000, 0.809017 ],
[ 0.147621, - 0.716567, 0.681718 ], [ 0.000000, - 0.525731, 0.850651 ],
[ 0.425325, - 0.688191, 0.587785 ], [ 0.587785, - 0.425325, 0.688191 ],
[ 0.688191, - 0.587785, 0.425325 ], [ - 0.955423, 0.295242, 0.000000 ],
[ - 0.951056, 0.162460, 0.262866 ], [ - 1.000000, 0.000000, 0.000000 ],
[ - 0.850651, 0.000000, 0.525731 ], [ - 0.955423, - 0.295242, 0.000000 ],
[ - 0.951056, - 0.162460, 0.262866 ], [ - 0.864188, 0.442863, - 0.238856 ],
[ - 0.951056, 0.162460, - 0.262866 ], [ - 0.809017, 0.309017, - 0.500000 ],
[ - 0.864188, - 0.442863, - 0.238856 ], [ - 0.951056, - 0.162460, - 0.262866 ],
[ - 0.809017, - 0.309017, - 0.500000 ], [ - 0.681718, 0.147621, - 0.716567 ],
[ - 0.681718, - 0.147621, - 0.716567 ], [ - 0.850651, 0.000000, - 0.525731 ],
[ - 0.688191, 0.587785, - 0.425325 ], [ - 0.587785, 0.425325, - 0.688191 ],
[ - 0.425325, 0.688191, - 0.587785 ], [ - 0.425325, - 0.688191, - 0.587785 ],
[ - 0.587785, - 0.425325, - 0.688191 ], [ - 0.688191, - 0.587785, - 0.425325 ]
];
// Loader for id Software's MD2 (Quake II) model format. Produces a
// BufferGeometry with morph targets (one per frame) and morph-target
// animation clips.
class MD2Loader extends Loader {
constructor( manager ) {
super( manager );
}
// Loads an MD2 file as an ArrayBuffer and parses it; parse errors are
// forwarded to onError (or logged) and reported to the LoadingManager.
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( buffer ) {
try {
onLoad( scope.parse( buffer ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
// Parses MD2 binary data (little-endian) into a BufferGeometry.
// Logs and returns undefined when the magic/version/length checks fail.
parse( buffer ) {
const data = new DataView( buffer );
// http://tfc.duke.free.fr/coding/md2-specs-en.html
const header = {};
const headerNames = [
'ident', 'version',
'skinwidth', 'skinheight',
'framesize',
'num_skins', 'num_vertices', 'num_st', 'num_tris', 'num_glcmds', 'num_frames',
'offset_skins', 'offset_st', 'offset_tris', 'offset_frames', 'offset_glcmds', 'offset_end'
];
// The header is 17 consecutive little-endian int32 values.
for ( let i = 0; i < headerNames.length; i ++ ) {
header[ headerNames[ i ] ] = data.getInt32( i * 4, true );
}
// 844121161 is the 'IDP2' magic number; only version 8 is supported.
if ( header.ident !== 844121161 || header.version !== 8 ) {
console.error( 'Not a valid MD2 file' );
return;
}
if ( header.offset_end !== data.byteLength ) {
console.error( 'Corrupted MD2 file' );
return;
}
//
const geometry = new BufferGeometry();
// uvs - stored as int16 texel coordinates; normalized by skin size, V flipped
const uvsTemp = [];
let offset = header.offset_st;
for ( let i = 0, l = header.num_st; i < l; i ++ ) {
const u = data.getInt16( offset + 0, true );
const v = data.getInt16( offset + 2, true );
uvsTemp.push( u / header.skinwidth, 1 - ( v / header.skinheight ) );
offset += 4;
}
// triangles - three vertex indices followed by three uv indices (uint16 each)
offset = header.offset_tris;
const vertexIndices = [];
const uvIndices = [];
for ( let i = 0, l = header.num_tris; i < l; i ++ ) {
vertexIndices.push(
data.getUint16( offset + 0, true ),
data.getUint16( offset + 2, true ),
data.getUint16( offset + 4, true )
);
uvIndices.push(
data.getUint16( offset + 6, true ),
data.getUint16( offset + 8, true ),
data.getUint16( offset + 10, true )
);
offset += 12;
}
// frames
const translation = new Vector3();
const scale = new Vector3();
const frames = [];
offset = header.offset_frames;
for ( let i = 0, l = header.num_frames; i < l; i ++ ) {
// Per-frame dequantization transform: position = byte * scale + translation.
scale.set(
data.getFloat32( offset + 0, true ),
data.getFloat32( offset + 4, true ),
data.getFloat32( offset + 8, true )
);
translation.set(
data.getFloat32( offset + 12, true ),
data.getFloat32( offset + 16, true ),
data.getFloat32( offset + 20, true )
);
offset += 24;
// Frame name: up to 16 bytes, zero-terminated.
const string = [];
for ( let j = 0; j < 16; j ++ ) {
const character = data.getUint8( offset + j );
if ( character === 0 ) break;
string[ j ] = character;
}
const frame = {
name: String.fromCharCode.apply( null, string ),
vertices: [],
normals: []
};
offset += 16;
// Vertices: x, y, z bytes plus one byte indexing into _normalData.
for ( let j = 0; j < header.num_vertices; j ++ ) {
let x = data.getUint8( offset ++ );
let y = data.getUint8( offset ++ );
let z = data.getUint8( offset ++ );
const n = _normalData[ data.getUint8( offset ++ ) ];
x = x * scale.x + translation.x;
y = y * scale.y + translation.y;
z = z * scale.z + translation.z;
frame.vertices.push( x, z, y ); // convert to Y-up
frame.normals.push( n[ 0 ], n[ 2 ], n[ 1 ] ); // convert to Y-up
}
frames.push( frame );
}
// static - base geometry from frame 0, de-indexed so every triangle corner is unique
const positions = [];
const normals = [];
const uvs = [];
const verticesTemp = frames[ 0 ].vertices;
const normalsTemp = frames[ 0 ].normals;
for ( let i = 0, l = vertexIndices.length; i < l; i ++ ) {
const vertexIndex = vertexIndices[ i ];
let stride = vertexIndex * 3;
//
const x = verticesTemp[ stride ];
const y = verticesTemp[ stride + 1 ];
const z = verticesTemp[ stride + 2 ];
positions.push( x, y, z );
//
const nx = normalsTemp[ stride ];
const ny = normalsTemp[ stride + 1 ];
const nz = normalsTemp[ stride + 2 ];
normals.push( nx, ny, nz );
//
const uvIndex = uvIndices[ i ];
stride = uvIndex * 2;
const u = uvsTemp[ stride ];
const v = uvsTemp[ stride + 1 ];
uvs.push( u, v );
}
geometry.setAttribute( 'position', new Float32BufferAttribute( positions, 3 ) );
geometry.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) );
geometry.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) );
// animation - one morph target per frame, de-indexed like the static attributes
const morphPositions = [];
const morphNormals = [];
for ( let i = 0, l = frames.length; i < l; i ++ ) {
const frame = frames[ i ];
const attributeName = frame.name;
if ( frame.vertices.length > 0 ) {
const positions = [];
for ( let j = 0, jl = vertexIndices.length; j < jl; j ++ ) {
const vertexIndex = vertexIndices[ j ];
const stride = vertexIndex * 3;
const x = frame.vertices[ stride ];
const y = frame.vertices[ stride + 1 ];
const z = frame.vertices[ stride + 2 ];
positions.push( x, y, z );
}
const positionAttribute = new Float32BufferAttribute( positions, 3 );
positionAttribute.name = attributeName;
morphPositions.push( positionAttribute );
}
if ( frame.normals.length > 0 ) {
const normals = [];
for ( let j = 0, jl = vertexIndices.length; j < jl; j ++ ) {
const vertexIndex = vertexIndices[ j ];
const stride = vertexIndex * 3;
const nx = frame.normals[ stride ];
const ny = frame.normals[ stride + 1 ];
const nz = frame.normals[ stride + 2 ];
normals.push( nx, ny, nz );
}
const normalAttribute = new Float32BufferAttribute( normals, 3 );
normalAttribute.name = attributeName;
morphNormals.push( normalAttribute );
}
}
geometry.morphAttributes.position = morphPositions;
geometry.morphAttributes.normal = morphNormals;
geometry.morphTargetsRelative = false;
// Frames sharing a name prefix become separate clips, sampled at 10 fps.
geometry.animations = AnimationClip.CreateClipsFromMorphTargetSequences( frames, 10 );
return geometry;
}
}
export { MD2Loader };

View File

@ -0,0 +1,102 @@
/**
* MDD is a special format that stores a position for every vertex in a model for every frame in an animation.
* Similar to BVH, it can be used to transfer animation data between different 3D applications or engines.
*
* MDD stores its data in binary format (big endian) in the following way:
*
* number of frames (a single uint32)
* number of vertices (a single uint32)
* time values for each frame (sequence of float32)
* vertex data for each frame (sequence of float32)
*/
import {
AnimationClip,
BufferAttribute,
FileLoader,
Loader,
NumberKeyframeTrack
} from '/static/javascript/three/build/three.module.js';
/**
 * Loader for the MDD vertex-cache format (big-endian binary layout described
 * in the file header comment). Produces one morph-target position attribute
 * per frame plus an AnimationClip that steps through them.
 */
class MDDLoader extends Loader {

	constructor( manager ) {

		super( manager );

	}

	/**
	 * Loads an MDD file.
	 *
	 * @param {String} url - URL of the .mdd file.
	 * @param {Function} onLoad - Called with { morphTargets, clip }.
	 * @param {Function} [onProgress] - Download progress callback.
	 * @param {Function} [onError] - Download error callback.
	 */
	load( url, onLoad, onProgress, onError ) {

		const scope = this;

		const loader = new FileLoader( this.manager );
		loader.setPath( this.path );
		loader.setResponseType( 'arraybuffer' );
		loader.load( url, function ( data ) {

			onLoad( scope.parse( data ) );

		}, onProgress, onError );

	}

	/**
	 * Parses MDD binary data (all values big-endian, DataView default).
	 *
	 * @param {ArrayBuffer} data - Raw file contents.
	 * @returns {{morphTargets: Array, clip: AnimationClip}}
	 */
	parse( data ) {

		const view = new DataView( data );

		const totalFrames = view.getUint32( 0 );
		const totalPoints = view.getUint32( 4 );

		let offset = 8;

		// animation clip: frame i is keyed by raising influence i to 1 at times[ i ]
		const times = new Float32Array( totalFrames );
		const values = new Float32Array( totalFrames * totalFrames ); // typed arrays are zero-initialized; no fill( 0 ) needed

		for ( let i = 0; i < totalFrames; i ++ ) {

			times[ i ] = view.getFloat32( offset ); offset += 4;
			values[ ( totalFrames * i ) + i ] = 1;

		}

		const track = new NumberKeyframeTrack( '.morphTargetInfluences', times, values );
		const clip = new AnimationClip( 'default', times[ times.length - 1 ], [ track ] );

		// morph targets: one xyz position set per frame
		const morphTargets = [];

		for ( let i = 0; i < totalFrames; i ++ ) {

			const morphTarget = new Float32Array( totalPoints * 3 );

			for ( let j = 0; j < totalPoints; j ++ ) {

				const stride = ( j * 3 );

				morphTarget[ stride + 0 ] = view.getFloat32( offset ); offset += 4; // x
				morphTarget[ stride + 1 ] = view.getFloat32( offset ); offset += 4; // y
				morphTarget[ stride + 2 ] = view.getFloat32( offset ); offset += 4; // z

			}

			const attribute = new BufferAttribute( morphTarget, 3 );
			attribute.name = 'morph_' + i;

			morphTargets.push( attribute );

		}

		return {
			morphTargets: morphTargets,
			clip: clip
		};

	}

}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,134 @@
/**
* MMD Toon Shader
*
* This shader is extended from MeshPhongMaterial, and merged algorithms with
* MeshToonMaterial and MeshMetcapMaterial.
* Ideas came from https://github.com/mrdoob/three.js/issues/19609
*
* Combining steps:
* * Declare matcap uniform.
* * Add gradientmap_pars_fragment.
* * Use gradient irradiances instead of dotNL irradiance from MeshPhongMaterial.
* (Replace lights_phong_pars_fragment with lights_mmd_toon_pars_fragment)
* * Add mmd_toon_matcap_fragment.
*/
import { UniformsUtils, ShaderLib } from '/static/javascript/three/build/three.module.js';
// GLSL chunk substituted for lights_phong_pars_fragment: Blinn-Phong lighting
// whose direct diffuse/specular irradiance is driven by the toon gradient map
// (getGradientIrradiance) instead of the plain dotNL term.
const lights_mmd_toon_pars_fragment = /* glsl */`
varying vec3 vViewPosition;
struct BlinnPhongMaterial {
vec3 diffuseColor;
vec3 specularColor;
float specularShininess;
float specularStrength;
};
void RE_Direct_BlinnPhong( const in IncidentLight directLight, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {
vec3 irradiance = getGradientIrradiance( geometryNormal, directLight.direction ) * directLight.color;
reflectedLight.directDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );
reflectedLight.directSpecular += irradiance * BRDF_BlinnPhong( directLight.direction, geometryViewDir, geometryNormal, material.specularColor, material.specularShininess ) * material.specularStrength;
}
void RE_IndirectDiffuse_BlinnPhong( const in vec3 irradiance, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {
reflectedLight.indirectDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );
}
#define RE_Direct RE_Direct_BlinnPhong
#define RE_IndirectDiffuse RE_IndirectDiffuse_BlinnPhong
`;
// GLSL chunk inserted where envmap_fragment normally sits: samples the matcap
// texture in view space and blends it additively or multiplicatively
// depending on the MATCAP_BLENDING_* define.
const mmd_toon_matcap_fragment = /* glsl */`
#ifdef USE_MATCAP
vec3 viewDir = normalize( vViewPosition );
vec3 x = normalize( vec3( viewDir.z, 0.0, - viewDir.x ) );
vec3 y = cross( viewDir, x );
vec2 uv = vec2( dot( x, normal ), dot( y, normal ) ) * 0.495 + 0.5; // 0.495 to remove artifacts caused by undersized matcap disks
vec4 matcapColor = texture2D( matcap, uv );
#ifdef MATCAP_BLENDING_MULTIPLY
outgoingLight *= matcapColor.rgb;
#elif defined( MATCAP_BLENDING_ADD )
outgoingLight += matcapColor.rgb;
#endif
#endif
`;
// MMD Toon shader definition: MeshPhongMaterial's shaders patched at the
// string level to use toon gradient lighting and matcap blending (see the
// file header comment for the merging steps).
const MMDToonShader = {
name: 'MMDToonShader',
defines: {
TOON: true,
MATCAP: true,
MATCAP_BLENDING_ADD: true,
},
// Union of the uniforms of the three base shaders being merged.
uniforms: UniformsUtils.merge( [
ShaderLib.toon.uniforms,
ShaderLib.phong.uniforms,
ShaderLib.matcap.uniforms,
] ),
// Phong vertex shader with the envmap pieces stripped out.
vertexShader:
ShaderLib.phong.vertexShader
.replace(
'#include <envmap_pars_vertex>',
''
)
.replace(
'#include <envmap_vertex>',
''
),
// Phong fragment shader with: the matcap uniform declared, the gradient-map
// chunk pulled in, toon lighting substituted for Phong lighting, and matcap
// blending appended in place of the envmap sampling.
fragmentShader:
ShaderLib.phong.fragmentShader
.replace(
'#include <common>',
`
#ifdef USE_MATCAP
uniform sampler2D matcap;
#endif
#include <common>
`
)
.replace(
'#include <envmap_common_pars_fragment>',
`
#include <gradientmap_pars_fragment>
`
)
.replace(
'#include <envmap_pars_fragment>',
''
)
.replace(
'#include <lights_phong_pars_fragment>',
lights_mmd_toon_pars_fragment
)
.replace(
'#include <envmap_fragment>',
`
${mmd_toon_matcap_fragment}
`
)
};
export { MMDToonShader };

View File

@ -0,0 +1,567 @@
import {
Color,
DefaultLoadingManager,
FileLoader,
FrontSide,
Loader,
LoaderUtils,
MeshPhongMaterial,
RepeatWrapping,
TextureLoader,
Vector2,
SRGBColorSpace
} from '/static/javascript/three/build/three.module.js';
/**
* Loads a Wavefront .mtl file specifying materials
*/
class MTLLoader extends Loader {
constructor( manager ) {
super( manager );
}
/**
 * Loads and parses a MTL asset from a URL.
 *
 * @param {String} url - URL to the MTL file.
 * @param {Function} [onLoad] - Callback invoked with the loaded object.
 * @param {Function} [onProgress] - Callback for download progress.
 * @param {Function} [onError] - Callback for download errors.
 *
 * @see setPath setResourcePath
 *
 * @note In order for relative texture references to resolve correctly
 * you must call setResourcePath() explicitly prior to load.
 */
load( url, onLoad, onProgress, onError ) {
const scope = this;
// Textures resolve relative to the MTL's own directory unless a path was set.
const path = ( this.path === '' ) ? LoaderUtils.extractUrlBase( url ) : this.path;
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setRequestHeader( this.requestHeader );
loader.setWithCredentials( this.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text, path ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
// Stores options forwarded to MaterialCreator (side, wrap, normalizeRGB,
// ignoreZeroRGBs, invertTrProperty); returns this for chaining.
setMaterialOptions( value ) {
this.materialOptions = value;
return this;
}
/**
 * Parses a MTL file.
 *
 * @param {String} text - Content of MTL file
 * @return {MaterialCreator}
 *
 * @see setPath setResourcePath
 *
 * @note In order for relative texture references to resolve correctly
 * you must call setResourcePath() explicitly prior to parse.
 */
parse( text, path ) {
const lines = text.split( '\n' );
let info = {};
const delimiter_pattern = /\s+/;
const materialsInfo = {};
for ( let i = 0; i < lines.length; i ++ ) {
let line = lines[ i ];
line = line.trim();
if ( line.length === 0 || line.charAt( 0 ) === '#' ) {
// Blank line or comment ignore
continue;
}
// First token is the statement keyword; the rest of the line is its value.
const pos = line.indexOf( ' ' );
let key = ( pos >= 0 ) ? line.substring( 0, pos ) : line;
key = key.toLowerCase();
let value = ( pos >= 0 ) ? line.substring( pos + 1 ) : '';
value = value.trim();
if ( key === 'newmtl' ) {
// New material
info = { name: value };
materialsInfo[ value ] = info;
} else {
// ka/kd/ks/ke carry RGB triples; every other statement is kept as a raw string.
if ( key === 'ka' || key === 'kd' || key === 'ks' || key === 'ke' ) {
const ss = value.split( delimiter_pattern, 3 );
info[ key ] = [ parseFloat( ss[ 0 ] ), parseFloat( ss[ 1 ] ), parseFloat( ss[ 2 ] ) ];
} else {
info[ key ] = value;
}
}
}
const materialCreator = new MaterialCreator( this.resourcePath || path, this.materialOptions );
materialCreator.setCrossOrigin( this.crossOrigin );
materialCreator.setManager( this.manager );
materialCreator.setMaterials( materialsInfo );
return materialCreator;
}
}
/**
* Create a new MTLLoader.MaterialCreator
* @param baseUrl - Url relative to which textures are loaded
* @param options - Set of options on how to construct the materials
* side: Which side to apply the material
* FrontSide (default), THREE.BackSide, THREE.DoubleSide
* wrap: What type of wrapping to apply for textures
* RepeatWrapping (default), THREE.ClampToEdgeWrapping, THREE.MirroredRepeatWrapping
* normalizeRGB: RGBs need to be normalized to 0-1 from 0-255
* Default: false, assumed to be already normalized
* ignoreZeroRGBs: Ignore values of RGBs (Ka,Kd,Ks) that are all 0's
* Default: false
* @constructor
*/
class MaterialCreator {
// @param baseUrl - Url relative to which textures are loaded
// @param options - material construction options (side, wrap, normalizeRGB,
//   ignoreZeroRGBs, invertTrProperty); see the class comment above
constructor( baseUrl = '', options = {} ) {
this.baseUrl = baseUrl;
this.options = options;
this.materialsInfo = {};
this.materials = {}; // name -> created material cache
this.materialsArray = [];
this.nameLookup = {}; // name -> index into materialsArray (built by getAsArray)
this.crossOrigin = 'anonymous';
this.side = ( this.options.side !== undefined ) ? this.options.side : FrontSide;
this.wrap = ( this.options.wrap !== undefined ) ? this.options.wrap : RepeatWrapping;
}
// Sets the crossOrigin value used when loading textures; returns this for chaining.
setCrossOrigin( value ) {
this.crossOrigin = value;
return this;
}
// Sets the LoadingManager used when loading textures.
setManager( value ) {
this.manager = value;
}
// Stores (normalized) materials info and resets all caches.
setMaterials( materialsInfo ) {
this.materialsInfo = this.convert( materialsInfo );
this.materials = {};
this.materialsArray = [];
this.nameLookup = {};
}
convert( materialsInfo ) {
if ( ! this.options ) return materialsInfo;
const converted = {};
for ( const mn in materialsInfo ) {
// Convert materials info into normalized form based on options
const mat = materialsInfo[ mn ];
const covmat = {};
converted[ mn ] = covmat;
for ( const prop in mat ) {
let save = true;
let value = mat[ prop ];
const lprop = prop.toLowerCase();
switch ( lprop ) {
case 'kd':
case 'ka':
case 'ks':
// Diffuse color (color under white light) using RGB values
if ( this.options && this.options.normalizeRGB ) {
value = [ value[ 0 ] / 255, value[ 1 ] / 255, value[ 2 ] / 255 ];
}
if ( this.options && this.options.ignoreZeroRGBs ) {
if ( value[ 0 ] === 0 && value[ 1 ] === 0 && value[ 2 ] === 0 ) {
// ignore
save = false;
}
}
break;
default:
break;
}
if ( save ) {
covmat[ lprop ] = value;
}
}
}
return converted;
}
// Eagerly creates every material defined in materialsInfo.
preload() {
for ( const mn in this.materialsInfo ) {
this.create( mn );
}
}
// Returns the array index assigned to a material name by getAsArray().
getIndex( materialName ) {
return this.nameLookup[ materialName ];
}
getAsArray() {
let index = 0;
for ( const mn in this.materialsInfo ) {
this.materialsArray[ index ] = this.create( mn );
this.nameLookup[ mn ] = index;
index ++;
}
return this.materialsArray;
}
// Returns the material for materialName, creating and caching it on first request.
create( materialName ) {
if ( this.materials[ materialName ] === undefined ) {
this.createMaterial_( materialName );
}
return this.materials[ materialName ];
}
/**
 * Builds a MeshPhongMaterial from the named materialsInfo entry and caches
 * it in this.materials. Internal; called lazily by create().
 *
 * @param {string} materialName
 * @returns {MeshPhongMaterial}
 */
createMaterial_( materialName ) {

	// Create material

	const scope = this;
	const mat = this.materialsInfo[ materialName ];
	const params = {
		name: materialName,
		side: this.side
	};

	// Joins a texture reference onto baseUrl unless it is already an
	// absolute http(s) URL; non-string or empty references yield ''.
	// NOTE(review): other schemes (data:, file:, protocol-relative //) also
	// get baseUrl prepended here — confirm that is intended.
	function resolveURL( baseUrl, url ) {

		if ( typeof url !== 'string' || url === '' )
			return '';

		// Absolute URL
		if ( /^https?:\/\//i.test( url ) ) return url;

		return baseUrl + url;

	}

	// Loads the texture referenced by `value` and stores it on params under
	// `mapType`, applying repeat/offset parsed from the MTL option flags.
	function setMapForType( mapType, value ) {

		if ( params[ mapType ] ) return; // Keep the first encountered texture

		const texParams = scope.getTextureParams( value, params );
		const map = scope.loadTexture( resolveURL( scope.baseUrl, texParams.url ) );

		map.repeat.copy( texParams.scale );
		map.offset.copy( texParams.offset );

		map.wrapS = scope.wrap;
		map.wrapT = scope.wrap;

		// Color textures (diffuse, emissive) carry sRGB data; data maps
		// (bump, normal, alpha, specular) stay in linear/no color space.
		if ( mapType === 'map' || mapType === 'emissiveMap' ) {

			map.colorSpace = SRGBColorSpace;

		}

		params[ mapType ] = map;

	}

	for ( const prop in mat ) {

		const value = mat[ prop ];
		let n;

		if ( value === '' ) continue;

		switch ( prop.toLowerCase() ) {

			// Ns is material specular exponent

			case 'kd':

				// Diffuse color (color under white light) using RGB values

				params.color = new Color().fromArray( value ).convertSRGBToLinear();

				break;

			case 'ks':

				// Specular color (color when light is reflected from shiny surface) using RGB values

				params.specular = new Color().fromArray( value ).convertSRGBToLinear();

				break;

			case 'ke':

				// Emissive using RGB values

				params.emissive = new Color().fromArray( value ).convertSRGBToLinear();

				break;

			case 'map_kd':

				// Diffuse texture map

				setMapForType( 'map', value );

				break;

			case 'map_ks':

				// Specular map

				setMapForType( 'specularMap', value );

				break;

			case 'map_ke':

				// Emissive map

				setMapForType( 'emissiveMap', value );

				break;

			case 'norm':

				// Normal map (PBR extension of the MTL format)

				setMapForType( 'normalMap', value );

				break;

			case 'map_bump':
			case 'bump':

				// Bump texture map

				setMapForType( 'bumpMap', value );

				break;

			case 'map_d':

				// Alpha map

				setMapForType( 'alphaMap', value );
				params.transparent = true;

				break;

			case 'ns':

				// The specular exponent (defines the focus of the specular highlight)
				// A high exponent results in a tight, concentrated highlight. Ns values normally range from 0 to 1000.

				params.shininess = parseFloat( value );

				break;

			case 'd':

				// Dissolve: d < 1 means the material is partially transparent.
				n = parseFloat( value );

				if ( n < 1 ) {

					params.opacity = n;
					params.transparent = true;

				}

				break;

			case 'tr':

				// Transparency (inverse of dissolve); some exporters invert it.
				n = parseFloat( value );

				if ( this.options && this.options.invertTrProperty ) n = 1 - n;

				if ( n > 0 ) {

					params.opacity = 1 - n;
					params.transparent = true;

				}

				break;

			default:
				break;

		}

	}

	this.materials[ materialName ] = new MeshPhongMaterial( params );
	return this.materials[ materialName ];

}
getTextureParams( value, matParams ) {
const texParams = {
scale: new Vector2( 1, 1 ),
offset: new Vector2( 0, 0 )
};
const items = value.split( /\s+/ );
let pos;
pos = items.indexOf( '-bm' );
if ( pos >= 0 ) {
matParams.bumpScale = parseFloat( items[ pos + 1 ] );
items.splice( pos, 2 );
}
pos = items.indexOf( '-s' );
if ( pos >= 0 ) {
texParams.scale.set( parseFloat( items[ pos + 1 ] ), parseFloat( items[ pos + 2 ] ) );
items.splice( pos, 4 ); // we expect 3 parameters here!
}
pos = items.indexOf( '-o' );
if ( pos >= 0 ) {
texParams.offset.set( parseFloat( items[ pos + 1 ] ), parseFloat( items[ pos + 2 ] ) );
items.splice( pos, 4 ); // we expect 3 parameters here!
}
texParams.url = items.join( ' ' ).trim();
return texParams;
}
/**
 * Loads a texture through the manager's registered handler for this URL
 * (e.g. a TGA/EXR loader), falling back to a plain TextureLoader.
 *
 * @param {string} url
 * @param {number} [mapping] - Optional texture mapping mode to assign.
 * @param {Function} [onLoad]
 * @param {Function} [onProgress]
 * @param {Function} [onError]
 * @returns {Texture}
 */
loadTexture( url, mapping, onLoad, onProgress, onError ) {
	// Fall back to the global manager when setManager() was never called.
	const manager = ( this.manager !== undefined ) ? this.manager : DefaultLoadingManager;
	let loader = manager.getHandler( url );
	if ( loader === null ) {
		loader = new TextureLoader( manager );
	}
	if ( loader.setCrossOrigin ) loader.setCrossOrigin( this.crossOrigin );
	const texture = loader.load( url, onLoad, onProgress, onError );
	if ( mapping !== undefined ) texture.mapping = mapping;
	return texture;
}
}
export { MTLLoader };

// View File
// @ -0,0 +1,845 @@
import {
FileLoader, Loader, TextureLoader, MeshBasicNodeMaterial, MeshPhysicalNodeMaterial, RepeatWrapping,
float, bool, int, vec2, vec3, vec4, color, texture,
positionLocal, positionWorld, uv, vertexColor,
normalLocal, normalWorld, tangentLocal, tangentWorld,
add, sub, mul, div, mod, abs, sign, floor, ceil, round, pow, sin, cos, tan,
asin, acos, atan2, sqrt, exp, clamp, min, max, normalize, length, dot, cross, normalMap,
remap, smoothstep, luminance, mx_rgbtohsv, mx_hsvtorgb,
mix,
mx_ramplr, mx_ramptb, mx_splitlr, mx_splittb,
mx_fractal_noise_float, mx_noise_float, mx_cell_noise_float, mx_worley_noise_float,
mx_transform_uv,
mx_safepower, mx_contrast,
mx_srgb_texture_to_lin_rec709,
saturation
} from '/static/javascript/three/build/three.module.js';
// Lookup table of available color-space conversion nodes;
// MaterialXNode.getColorSpaceNode() builds keys of the form
// `mx_<source>_to_<target>` against this table.
const colorSpaceLib = {
	mx_srgb_texture_to_lin_rec709
};
class MXElement {

	/**
	 * Descriptor binding a MaterialX node name to the TSL function that
	 * implements it, plus the ordered list of XML input names to forward.
	 *
	 * @param {string} name - MaterialX element name (e.g. 'add').
	 * @param {Function} nodeFunc - Node builder invoked with the resolved inputs.
	 * @param {?Array<string>} params - Input names in call order, or null.
	 */
	constructor( name, nodeFunc, params = null ) {

		Object.assign( this, { name, nodeFunc, params } );

	}

}
// Ref: https://github.com/mrdoob/three.js/issues/24674

// Wrappers supplying the MaterialX default value when the second input of a
// binary math node is omitted in the document.
function mx_add( in1, in2 = float( 0 ) ) { return add( in1, in2 ); }
function mx_subtract( in1, in2 = float( 0 ) ) { return sub( in1, in2 ); }
function mx_multiply( in1, in2 = float( 1 ) ) { return mul( in1, in2 ); }
function mx_divide( in1, in2 = float( 1 ) ) { return div( in1, in2 ); }
function mx_modulo( in1, in2 = float( 1 ) ) { return mod( in1, in2 ); }
function mx_power( in1, in2 = float( 1 ) ) { return pow( in1, in2 ); }
function mx_atan2( in1 = float( 0 ), in2 = float( 1 ) ) { return atan2( in1, in2 ); }
/**
 * Table of supported MaterialX elements and the input names forwarded to
 * each node function (unlisted inputs are ignored; missing inputs are
 * skipped by MaterialXNode.getNodesByNames()).
 *
 * Fixes vs. the previous revision, per the MaterialX standard-node spec:
 * - unary nodes (absval, sign, floor, ceil, round, magnitude) take a single
 *   input named 'in' — they were listed as [ 'in1', 'in2' ], which never
 *   matched the document's actual input and left the node with no argument;
 * - crossproduct is binary like dotproduct — it was listed with only
 *   [ 'in' ], so the second operand was never forwarded to cross().
 */
const MXElements = [

	// << Math >>
	new MXElement( 'add', mx_add, [ 'in1', 'in2' ] ),
	new MXElement( 'subtract', mx_subtract, [ 'in1', 'in2' ] ),
	new MXElement( 'multiply', mx_multiply, [ 'in1', 'in2' ] ),
	new MXElement( 'divide', mx_divide, [ 'in1', 'in2' ] ),
	new MXElement( 'modulo', mx_modulo, [ 'in1', 'in2' ] ),
	new MXElement( 'absval', abs, [ 'in' ] ),
	new MXElement( 'sign', sign, [ 'in' ] ),
	new MXElement( 'floor', floor, [ 'in' ] ),
	new MXElement( 'ceil', ceil, [ 'in' ] ),
	new MXElement( 'round', round, [ 'in' ] ),
	new MXElement( 'power', mx_power, [ 'in1', 'in2' ] ),
	new MXElement( 'sin', sin, [ 'in' ] ),
	new MXElement( 'cos', cos, [ 'in' ] ),
	new MXElement( 'tan', tan, [ 'in' ] ),
	new MXElement( 'asin', asin, [ 'in' ] ),
	new MXElement( 'acos', acos, [ 'in' ] ),
	new MXElement( 'atan2', mx_atan2, [ 'in1', 'in2' ] ),
	new MXElement( 'sqrt', sqrt, [ 'in' ] ),
	//new MtlXElement( 'ln', ... ),
	new MXElement( 'exp', exp, [ 'in' ] ),
	new MXElement( 'clamp', clamp, [ 'in', 'low', 'high' ] ),
	new MXElement( 'min', min, [ 'in1', 'in2' ] ),
	new MXElement( 'max', max, [ 'in1', 'in2' ] ),
	new MXElement( 'normalize', normalize, [ 'in' ] ),
	new MXElement( 'magnitude', length, [ 'in' ] ),
	new MXElement( 'dotproduct', dot, [ 'in1', 'in2' ] ),
	new MXElement( 'crossproduct', cross, [ 'in1', 'in2' ] ),
	//new MtlXElement( 'transformpoint', ... ),
	//new MtlXElement( 'transformvector', ... ),
	//new MtlXElement( 'transformnormal', ... ),
	//new MtlXElement( 'transformmatrix', ... ),
	new MXElement( 'normalmap', normalMap, [ 'in', 'scale' ] ),
	//new MtlXElement( 'transpose', ... ),
	//new MtlXElement( 'determinant', ... ),
	//new MtlXElement( 'invertmatrix', ... ),
	//new MtlXElement( 'rotate2d', rotateUV, [ 'in', radians( 'amount' )** ] ),
	//new MtlXElement( 'rotate3d', ... ),
	//new MtlXElement( 'arrayappend', ... ),
	//new MtlXElement( 'dot', ... ),

	// << Adjustment >>
	new MXElement( 'remap', remap, [ 'in', 'inlow', 'inhigh', 'outlow', 'outhigh' ] ),
	new MXElement( 'smoothstep', smoothstep, [ 'in', 'low', 'high' ] ),
	//new MtlXElement( 'curveadjust', ... ),
	//new MtlXElement( 'curvelookup', ... ),
	new MXElement( 'luminance', luminance, [ 'in', 'lumacoeffs' ] ),
	new MXElement( 'rgbtohsv', mx_rgbtohsv, [ 'in' ] ),
	new MXElement( 'hsvtorgb', mx_hsvtorgb, [ 'in' ] ),

	// << Mix >>
	new MXElement( 'mix', mix, [ 'bg', 'fg', 'mix' ] ),

	// << Channel >>
	new MXElement( 'combine2', vec2, [ 'in1', 'in2' ] ),
	new MXElement( 'combine3', vec3, [ 'in1', 'in2', 'in3' ] ),
	new MXElement( 'combine4', vec4, [ 'in1', 'in2', 'in3', 'in4' ] ),

	// << Procedural >>
	new MXElement( 'ramplr', mx_ramplr, [ 'valuel', 'valuer', 'texcoord' ] ),
	new MXElement( 'ramptb', mx_ramptb, [ 'valuet', 'valueb', 'texcoord' ] ),
	new MXElement( 'splitlr', mx_splitlr, [ 'valuel', 'valuer', 'texcoord' ] ),
	new MXElement( 'splittb', mx_splittb, [ 'valuet', 'valueb', 'texcoord' ] ),
	new MXElement( 'noise2d', mx_noise_float, [ 'texcoord', 'amplitude', 'pivot' ] ),
	new MXElement( 'noise3d', mx_noise_float, [ 'texcoord', 'amplitude', 'pivot' ] ),
	new MXElement( 'fractal3d', mx_fractal_noise_float, [ 'position', 'octaves', 'lacunarity', 'diminish', 'amplitude' ] ),
	new MXElement( 'cellnoise2d', mx_cell_noise_float, [ 'texcoord' ] ),
	new MXElement( 'cellnoise3d', mx_cell_noise_float, [ 'texcoord' ] ),
	new MXElement( 'worleynoise2d', mx_worley_noise_float, [ 'texcoord', 'jitter' ] ),
	new MXElement( 'worleynoise3d', mx_worley_noise_float, [ 'texcoord', 'jitter' ] ),

	// << Supplemental >>
	//new MtlXElement( 'tiledimage', ... ),
	//new MtlXElement( 'triplanarprojection', triplanarTextures, [ 'filex', 'filey', 'filez' ] ),
	//new MtlXElement( 'ramp4', ... ),
	//new MtlXElement( 'place2d', mx_place2d, [ 'texcoord', 'pivot', 'scale', 'rotate', 'offset' ] ),
	new MXElement( 'safepower', mx_safepower, [ 'in1', 'in2' ] ),
	new MXElement( 'contrast', mx_contrast, [ 'in', 'amount', 'pivot' ] ),
	//new MtlXElement( 'hsvadjust', ... ),
	new MXElement( 'saturate', saturation, [ 'in', 'amount' ] ),
	//new MtlXElement( 'extract', ... ),
	//new MtlXElement( 'separate2', ... ),
	//new MtlXElement( 'separate3', ... ),
	//new MtlXElement( 'separate4', ... )

];
// Index the element descriptors by MaterialX node name for O(1) dispatch
// in MaterialXNode.getNode().
const MtlXLibrary = Object.fromEntries( MXElements.map( ( element ) => [ element.name, element ] ) );
/**
 * Loader for MaterialX (.mtlx) documents. Fetches the XML text and parses
 * it into `{ materials }` via the MaterialX parser below.
 */
class MaterialXLoader extends Loader {

	constructor( manager ) {

		super( manager );

	}

	/**
	 * @param {string} url
	 * @param {Function} onLoad - Receives the result of parse().
	 * @param {Function} [onProgress]
	 * @param {Function} [onError] - Defaults to console.error when omitted.
	 * @returns {MaterialXLoader} this, for chaining.
	 */
	load( url, onLoad, onProgress, onError ) {

		const handleError = ( e ) => {

			if ( onError ) onError( e );
			else console.error( e );

		};

		const fileLoader = new FileLoader( this.manager ).setPath( this.path );

		fileLoader.load( url, async ( text ) => {

			try {

				onLoad( this.parse( text ) );

			} catch ( e ) {

				handleError( e );

			}

		}, onProgress, handleError );

		return this;

	}

	/** Parses MaterialX XML text into `{ materials }`. */
	parse( text ) {

		return new MaterialX( this.manager, this.path ).parse( text );

	}

}
/**
 * Wrapper around a single XML element of a MaterialX document.
 *
 * Instances form a tree mirroring the XML (built via add()); getNode()
 * lazily translates an element and its inputs into a three.js node object,
 * caching the result on this.node.
 */
class MaterialXNode {

	constructor( materialX, nodeXML, nodePath = '' ) {

		this.materialX = materialX;
		this.nodeXML = nodeXML;
		// Slash-separated path used as the key in MaterialX.nodesXLib.
		this.nodePath = nodePath ? nodePath + '/' + this.name : this.name;

		this.parent = null;

		// Cached result of getNode(); null until first built.
		this.node = null;

		this.children = [];

	}

	// XML tag name, e.g. 'input', 'nodegraph', 'standard_surface'.
	get element() {

		return this.nodeXML.nodeName;

	}

	get nodeGraph() {

		return this.getAttribute( 'nodegraph' );

	}

	get nodeName() {

		return this.getAttribute( 'nodename' );

	}

	get interfaceName() {

		return this.getAttribute( 'interfacename' );

	}

	get output() {

		return this.getAttribute( 'output' );

	}

	get name() {

		return this.getAttribute( 'name' );

	}

	get type() {

		return this.getAttribute( 'type' );

	}

	get value() {

		return this.getAttribute( 'value' );

	}

	// Nearest ancestor (or self) that is a <nodegraph>; null when none.
	getNodeGraph() {

		let nodeX = this;

		while ( nodeX !== null ) {

			if ( nodeX.element === 'nodegraph' ) {

				break;

			}

			nodeX = nodeX.parent;

		}

		return nodeX;

	}

	getRoot() {

		let nodeX = this;

		while ( nodeX.parent !== null ) {

			nodeX = nodeX.parent;

		}

		return nodeX;

	}

	// Path of the node this element references (via nodegraph/output or
	// nodename/interfacename attributes), or null when it is not a reference.
	get referencePath() {

		let referencePath = null;

		if ( this.nodeGraph !== null && this.output !== null ) {

			referencePath = this.nodeGraph + '/' + this.output;

		} else if ( this.nodeName !== null || this.interfaceName !== null ) {

			referencePath = this.getNodeGraph().nodePath + '/' + ( this.nodeName || this.interfaceName );

		}

		return referencePath;

	}

	get hasReference() {

		return this.referencePath !== null;

	}

	// Literal <input> carrying an inline value (filenames become textures).
	get isConst() {

		return this.element === 'input' && this.value !== null && this.type !== 'filename';

	}

	// Conversion function from this element's colorspace to the document's,
	// looked up as `mx_<source>_to_<target>`; undefined when unsupported.
	getColorSpaceNode() {

		const csSource = this.getAttribute( 'colorspace' );
		const csTarget = this.getRoot().getAttribute( 'colorspace' );
		const nodeName = `mx_${ csSource }_to_${ csTarget }`;

		return colorSpaceLib[ nodeName ];

	}

	getTexture() {

		const filePrefix = this.getRecursiveAttribute( 'fileprefix' ) || '';

		let loader = this.materialX.textureLoader;
		const uri = filePrefix + this.value;

		if ( uri ) {

			// A manager-registered handler for this URI takes precedence
			// over the default TextureLoader.
			const handler = this.materialX.manager.getHandler( uri );
			if ( handler !== null ) loader = handler;

		}

		const texture = loader.load( uri );
		texture.wrapS = texture.wrapT = RepeatWrapping;
		texture.flipY = false;

		return texture;

	}

	// Maps a MaterialX type string to a node constructor; null when unknown.
	getClassFromType( type ) {

		let nodeClass = null;

		if ( type === 'integer' ) nodeClass = int;
		else if ( type === 'float' ) nodeClass = float;
		else if ( type === 'vector2' ) nodeClass = vec2;
		else if ( type === 'vector3' ) nodeClass = vec3;
		else if ( type === 'vector4' || type === 'color4' ) nodeClass = vec4;
		else if ( type === 'color3' ) nodeClass = color;
		else if ( type === 'boolean' ) nodeClass = bool;

		return nodeClass;

	}

	/**
	 * Builds (or returns the cached) three.js node for this element:
	 * constants, references, built-in geometry accessors, images, or any
	 * element found in MtlXLibrary. Unsupported elements fall back to
	 * float( 0 ) with a console warning. The result is finally cast to the
	 * element's declared type when one is known.
	 */
	getNode() {

		let node = this.node;

		if ( node !== null ) {

			return node;

		}

		//

		const type = this.type;

		if ( this.isConst ) {

			const nodeClass = this.getClassFromType( type );

			node = nodeClass( ...this.getVector() );

		} else if ( this.hasReference ) {

			node = this.materialX.getMaterialXNode( this.referencePath ).getNode();

		} else {

			const element = this.element;

			if ( element === 'convert' ) {

				const nodeClass = this.getClassFromType( type );

				node = nodeClass( this.getNodeByName( 'in' ) );

			} else if ( element === 'constant' ) {

				node = this.getNodeByName( 'value' );

			} else if ( element === 'position' ) {

				const space = this.getAttribute( 'space' );
				node = space === 'world' ? positionWorld : positionLocal;

			} else if ( element === 'normal' ) {

				const space = this.getAttribute( 'space' );
				node = space === 'world' ? normalWorld : normalLocal;

			} else if ( element === 'tangent' ) {

				const space = this.getAttribute( 'space' );
				node = space === 'world' ? tangentWorld : tangentLocal;

			} else if ( element === 'texcoord' ) {

				const indexNode = this.getChildByName( 'index' );
				const index = indexNode ? parseInt( indexNode.value ) : 0;

				node = uv( index );

			} else if ( element === 'geomcolor' ) {

				const indexNode = this.getChildByName( 'index' );
				const index = indexNode ? parseInt( indexNode.value ) : 0;

				node = vertexColor( index );

			} else if ( element === 'tiledimage' ) {

				const file = this.getChildByName( 'file' );

				const textureFile = file.getTexture();
				// NOTE(review): getNodesByNames takes rest parameters, but an
				// array is passed here without spreading, so 'uvtiling' /
				// 'uvoffset' lookups never match — confirm this should be
				// ...this.getNodesByNames( 'uvtiling', 'uvoffset' ).
				const uvTiling = mx_transform_uv( ...this.getNodesByNames( [ 'uvtiling', 'uvoffset' ] ) );

				node = texture( textureFile, uvTiling );

				const colorSpaceNode = file.getColorSpaceNode();

				if ( colorSpaceNode ) {

					node = colorSpaceNode( node );

				}

			} else if ( element === 'image' ) {

				const file = this.getChildByName( 'file' );
				const uvNode = this.getNodeByName( 'texcoord' );

				const textureFile = file.getTexture();

				node = texture( textureFile, uvNode );

				const colorSpaceNode = file.getColorSpaceNode();

				if ( colorSpaceNode ) {

					node = colorSpaceNode( node );

				}

			} else if ( MtlXLibrary[ element ] !== undefined ) {

				const nodeElement = MtlXLibrary[ element ];

				node = nodeElement.nodeFunc( ...this.getNodesByNames( ...nodeElement.params ) );

			}

		}

		//

		if ( node === null ) {

			console.warn( `THREE.MaterialXLoader: Unexpected node ${ new XMLSerializer().serializeToString( this.nodeXML ) }.` );

			node = float( 0 );

		}

		//

		const nodeToTypeClass = this.getClassFromType( type );

		if ( nodeToTypeClass !== null ) {

			node = nodeToTypeClass( node );

		}

		node.name = this.name;

		this.node = node;

		return node;

	}

	// First direct child with the given name; undefined when absent.
	getChildByName( name ) {

		for ( const input of this.children ) {

			if ( input.name === name ) {

				return input;

			}

		}

	}

	// Builds all children and returns them keyed by node name.
	getNodes() {

		const nodes = {};

		for ( const input of this.children ) {

			const node = input.getNode();

			nodes[ node.name ] = node;

		}

		return nodes;

	}

	getNodeByName( name ) {

		const child = this.getChildByName( name );

		return child ? child.getNode() : undefined;

	}

	// Resolves each name to a node, silently skipping names with no child.
	getNodesByNames( ...names ) {

		const nodes = [];

		for ( const name of names ) {

			const node = this.getNodeByName( name );

			if ( node ) nodes.push( node );

		}

		return nodes;

	}

	getValue() {

		return this.value.trim();

	}

	// Parses the value attribute into an array of numbers (comma and/or
	// whitespace separated).
	getVector() {

		const vector = [];

		for ( const val of this.getValue().split( /[,|\s]/ ) ) {

			if ( val !== '' ) {

				vector.push( Number( val.trim() ) );

			}

		}

		return vector;

	}

	getAttribute( name ) {

		return this.nodeXML.getAttribute( name );

	}

	// Attribute lookup that walks up the parent chain until a value is found.
	getRecursiveAttribute( name ) {

		let attribute = this.nodeXML.getAttribute( name );

		if ( attribute === null && this.parent !== null ) {

			attribute = this.parent.getRecursiveAttribute( name );

		}

		return attribute;

	}

	/**
	 * Maps a MaterialX standard_surface shader's inputs onto the given
	 * MeshPhysicalNodeMaterial's node slots, with defaults for the missing
	 * ones.
	 */
	setStandardSurfaceToGltfPBR( material ) {

		const inputs = this.getNodes();

		//

		let colorNode = null;

		if ( inputs.base && inputs.base_color ) colorNode = mul( inputs.base, inputs.base_color );
		else if ( inputs.base ) colorNode = inputs.base;
		else if ( inputs.base_color ) colorNode = inputs.base_color;

		//

		let roughnessNode = null;

		if ( inputs.specular_roughness ) roughnessNode = inputs.specular_roughness;

		//

		let metalnessNode = null;

		if ( inputs.metalness ) metalnessNode = inputs.metalness;

		//

		let clearcoatNode = null;
		let clearcoatRoughnessNode = null;

		if ( inputs.coat ) clearcoatNode = inputs.coat;
		if ( inputs.coat_roughness ) clearcoatRoughnessNode = inputs.coat_roughness;

		if ( inputs.coat_color ) {

			// NOTE(review): both ternary branches assign from colorNode, so
			// coat_color is only applied when a base color already exists and
			// is otherwise dropped — confirm whether the else-branch should
			// be inputs.coat_color.
			colorNode = colorNode ? mul( colorNode, inputs.coat_color ) : colorNode;

		}

		//

		let normalNode = null;

		if ( inputs.normal ) normalNode = inputs.normal;

		//

		let emissiveNode = null;

		if ( inputs.emission ) emissiveNode = inputs.emission;

		if ( inputs.emissionColor ) {

			// NOTE(review): same pattern as coat_color above — emissionColor
			// is ignored when no emission input was set; confirm intent.
			emissiveNode = emissiveNode ? mul( emissiveNode, inputs.emissionColor ) : emissiveNode;

		}

		//

		material.colorNode = colorNode || color( 0.8, 0.8, 0.8 );
		material.roughnessNode = roughnessNode || float( 0.2 );
		material.metalnessNode = metalnessNode || float( 0 );
		material.clearcoatNode = clearcoatNode || float( 0 );
		material.clearcoatRoughnessNode = clearcoatRoughnessNode || float( 0 );

		if ( normalNode ) material.normalNode = normalNode;
		if ( emissiveNode ) material.emissiveNode = emissiveNode;

	}

	/*setGltfPBR( material ) {

		const inputs = this.getNodes();

		console.log( inputs );

	}*/

	// Dispatches shader-model-specific setup onto the material.
	setMaterial( material ) {

		const element = this.element;

		if ( element === 'gltf_pbr' ) {

			//this.setGltfPBR( material );

		} else if ( element === 'standard_surface' ) {

			this.setStandardSurfaceToGltfPBR( material );

		}

	}

	// Unlit fallback: uses the last child named 'out' as the color node.
	// Note: toReversed() requires an ES2023-capable runtime.
	toBasicMaterial() {

		const material = new MeshBasicNodeMaterial();
		material.name = this.name;

		for ( const nodeX of this.children.toReversed() ) {

			if ( nodeX.name === 'out' ) {

				material.colorNode = nodeX.getNode();

				break;

			}

		}

		return material;

	}

	// Builds a physical material from a <surfacematerial> element's
	// referenced shader nodes.
	toPhysicalMaterial() {

		const material = new MeshPhysicalNodeMaterial();
		material.name = this.name;

		for ( const nodeX of this.children ) {

			const shaderProperties = this.materialX.getMaterialXNode( nodeX.nodeName );
			shaderProperties.setMaterial( material );

		}

		return material;

	}

	/**
	 * Converts the document root's children into a name → material map.
	 * Prefers <surfacematerial> elements; when none exist, falls back to
	 * unlit basic materials built from each <nodegraph>.
	 */
	toMaterials() {

		const materials = {};

		let isUnlit = true;

		for ( const nodeX of this.children ) {

			if ( nodeX.element === 'surfacematerial' ) {

				const material = nodeX.toPhysicalMaterial();

				materials[ material.name ] = material;

				isUnlit = false;

			}

		}

		if ( isUnlit ) {

			for ( const nodeX of this.children ) {

				if ( nodeX.element === 'nodegraph' ) {

					const material = nodeX.toBasicMaterial();

					materials[ material.name ] = material;

				}

			}

		}

		return materials;

	}

	add( materialXNode ) {

		materialXNode.parent = this;

		this.children.push( materialXNode );

	}

}
/**
 * Root MaterialX parser: walks an XML document, registers every node by
 * path, and produces the resulting materials.
 */
class MaterialX {

	/**
	 * @param {LoadingManager} manager - Used for texture loading and handler lookup.
	 * @param {string} path - Base path for resolving texture files.
	 */
	constructor( manager, path ) {

		this.manager = manager;
		this.path = path;
		this.resourcePath = '';

		// Flat lookup of every named node, keyed by slash-separated path.
		this.nodesXLib = new Map();
		//this.nodesXRefLib = new WeakMap();

		this.textureLoader = new TextureLoader( manager );

	}

	addMaterialXNode( materialXNode ) {

		this.nodesXLib.set( materialXNode.nodePath, materialXNode );

	}

	/*getMaterialXNodeFromXML( xmlNode ) {

		return this.nodesXRefLib.get( xmlNode );

	}*/

	// Path segments are joined with '/' to form the lookup key.
	getMaterialXNode( ...names ) {

		return this.nodesXLib.get( names.join( '/' ) );

	}

	// Recursively wraps an XML element and its children, registering each
	// named node in nodesXLib along the way.
	parseNode( nodeXML, nodePath = '' ) {

		const materialXNode = new MaterialXNode( this, nodeXML, nodePath );
		if ( materialXNode.nodePath ) this.addMaterialXNode( materialXNode );

		for ( const childNodeXML of nodeXML.children ) {

			materialXNode.add( this.parseNode( childNodeXML, materialXNode.nodePath ) );

		}

		return materialXNode;

	}

	/**
	 * Parses MaterialX XML text.
	 *
	 * @param {string} text
	 * @returns {{materials: Object}} Name → material map.
	 */
	parse( text ) {

		const rootXML = new DOMParser().parseFromString( text, 'application/xml' ).documentElement;

		this.textureLoader.setPath( this.path );

		return { materials: this.parseNode( rootXML ).toMaterials() };

	}

}
export { MaterialXLoader };

// View File
// @ -0,0 +1,56 @@
/**
 * Mirror Shader
 * Copies half the input to the other half
 *
 * side: side of input to mirror (0 = left, 1 = right, 2 = top, 3 = bottom)
 */

const MirrorShader = {

	name: 'MirrorShader',

	uniforms: {

		'tDiffuse': { value: null }, // input render-target texture
		'side': { value: 1 } // which half is the mirror source (see header)

	},

	// Pass-through vertex shader: forwards UVs unchanged.
	vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,

	// Reflects the sampling coordinate across the selected axis midpoint so
	// one half of the image mirrors the other.
	fragmentShader: /* glsl */`
uniform sampler2D tDiffuse;
uniform int side;
varying vec2 vUv;
void main() {
vec2 p = vUv;
if (side == 0){
if (p.x > 0.5) p.x = 1.0 - p.x;
}else if (side == 1){
if (p.x < 0.5) p.x = 1.0 - p.x;
}else if (side == 2){
if (p.y < 0.5) p.y = 1.0 - p.y;
}else if (side == 3){
if (p.y > 0.5) p.y = 1.0 - p.y;
}
vec4 color = texture2D(tDiffuse, p);
gl_FragColor = color;
}`

};

export { MirrorShader };

// View File
// @ -0,0 +1,686 @@
import {
FileLoader,
Loader,
Matrix4,
Vector3
} from '/static/javascript/three/build/three.module.js';
import * as fflate from '../libs/fflate.module.js';
import { Volume } from '../misc/Volume.js';
/**
 * Loader for NRRD volume files. Parses the text header and the (raw,
 * gzipped, or ASCII/hex encoded) data payload into a Volume instance.
 * Largely inspired by the XTK NRRD parser: https://github.com/xtk/X
 */
class NRRDLoader extends Loader {

	constructor( manager ) {

		super( manager );

	}

	/**
	 * Loads the NRRD file at `url` as an ArrayBuffer and passes the parsed
	 * Volume to onLoad. Parse errors go to onError (or console.error) and
	 * are reported to the manager.
	 */
	load( url, onLoad, onProgress, onError ) {

		const scope = this;

		const loader = new FileLoader( scope.manager );
		loader.setPath( scope.path );
		loader.setResponseType( 'arraybuffer' );
		loader.setRequestHeader( scope.requestHeader );
		loader.setWithCredentials( scope.withCredentials );
		loader.load( url, function ( data ) {

			try {

				onLoad( scope.parse( data ) );

			} catch ( e ) {

				if ( onError ) {

					onError( e );

				} else {

					console.error( e );

				}

				scope.manager.itemError( url );

			}

		}, onProgress, onError );

	}

	/**
	 * Enables/disables segmentation mode on parsed volumes (copied onto
	 * Volume.segmentation in parse()).
	 *
	 * @param {boolean} segmentation
	 */
	setSegmentation( segmentation ) {

		this.segmentation = segmentation;

	}

	/**
	 * Parses an NRRD ArrayBuffer into a Volume.
	 *
	 * @param {ArrayBuffer} data
	 * @returns {Volume}
	 * @throws {Error} When the magic is missing or the encoding is bzip2.
	 */
	parse( data ) {

		// this parser is largely inspired from the XTK NRRD parser : https://github.com/xtk/X

		let _data = data;

		let _dataPointer = 0;

		const _nativeLittleEndian = new Int8Array( new Int16Array( [ 1 ] ).buffer )[ 0 ] > 0;

		// NOTE(review): the header's 'endian' field is recorded by
		// _fieldFunctions but never consulted here — the payload is always
		// treated as little-endian.
		const _littleEndian = true;

		const headerObject = {};

		// Reads `chunks` scalars of the given NRRD type from _data,
		// advancing _dataPointer and fixing endianness when needed.
		function scan( type, chunks ) {

			let _chunkSize = 1;
			let _array_type = Uint8Array;

			switch ( type ) {

				// 1 byte data types
				case 'uchar':
					break;
				case 'schar':
					_array_type = Int8Array;
					break;
				// 2 byte data types
				case 'ushort':
					_array_type = Uint16Array;
					_chunkSize = 2;
					break;
				case 'sshort':
					_array_type = Int16Array;
					_chunkSize = 2;
					break;
				// 4 byte data types
				case 'uint':
					_array_type = Uint32Array;
					_chunkSize = 4;
					break;
				case 'sint':
					_array_type = Int32Array;
					_chunkSize = 4;
					break;
				case 'float':
					_array_type = Float32Array;
					_chunkSize = 4;
					break;
				case 'complex':
					_array_type = Float64Array;
					_chunkSize = 8;
					break;
				case 'double':
					_array_type = Float64Array;
					_chunkSize = 8;
					break;

			}

			// increase the data pointer in-place
			let _bytes = new _array_type( _data.slice( _dataPointer,
				_dataPointer += chunks * _chunkSize ) );

			// if required, flip the endianness of the bytes
			if ( _nativeLittleEndian != _littleEndian ) {

				// we need to flip here since the format doesn't match the native endianness
				_bytes = flipEndianness( _bytes, _chunkSize );

			}

			// return the byte array
			return _bytes;

		}

		//Flips typed array endianness in-place. Based on https://github.com/kig/DataStream.js/blob/master/DataStream.js.
		function flipEndianness( array, chunkSize ) {

			const u8 = new Uint8Array( array.buffer, array.byteOffset, array.byteLength );
			for ( let i = 0; i < array.byteLength; i += chunkSize ) {

				// reverse the bytes of each chunk in place
				for ( let j = i + chunkSize - 1, k = i; j > k; j --, k ++ ) {

					const tmp = u8[ k ];
					u8[ k ] = u8[ j ];
					u8[ j ] = tmp;

				}

			}

			return array;

		}

		// Parses the text header into headerObject, dispatching known
		// fields through _fieldFunctions.
		function parseHeader( header ) {

			let data, field, fn, i, l, m, _i, _len;
			const lines = header.split( /\r?\n/ );
			for ( _i = 0, _len = lines.length; _i < _len; _i ++ ) {

				l = lines[ _i ];
				if ( l.match( /NRRD\d+/ ) ) {

					headerObject.isNrrd = true;

				} else if ( ! l.match( /^#/ ) && ( m = l.match( /(.*):(.*)/ ) ) ) {

					field = m[ 1 ].trim();
					data = m[ 2 ].trim();
					fn = _fieldFunctions[ field ];
					if ( fn ) {

						fn.call( headerObject, data );

					} else {

						// unknown fields are kept verbatim
						headerObject[ field ] = data;

					}

				}

			}

			if ( ! headerObject.isNrrd ) {

				throw new Error( 'Not an NRRD file' );

			}

			if ( headerObject.encoding === 'bz2' || headerObject.encoding === 'bzip2' ) {

				throw new Error( 'Bzip is not supported' );

			}

			if ( ! headerObject.vectors ) {

				//if no space direction is set, let's use the identity
				headerObject.vectors = [ ];
				headerObject.vectors.push( [ 1, 0, 0 ] );
				headerObject.vectors.push( [ 0, 1, 0 ] );
				headerObject.vectors.push( [ 0, 0, 1 ] );

				//apply spacing if defined
				if ( headerObject.spacings ) {

					for ( i = 0; i <= 2; i ++ ) {

						if ( ! isNaN( headerObject.spacings[ i ] ) ) {

							for ( let j = 0; j <= 2; j ++ ) {

								headerObject.vectors[ i ][ j ] *= headerObject.spacings[ i ];

							}

						}

					}

				}

			}

		}

		//parse the data when registred as one of this type : 'text', 'ascii', 'txt'
		function parseDataAsText( data, start, end ) {

			let number = '';
			start = start || 0;
			end = end || data.length;
			let value;
			//length of the result is the product of the sizes
			const lengthOfTheResult = headerObject.sizes.reduce( function ( previous, current ) {

				return previous * current;

			}, 1 );

			// 'hex' encoding reuses this path with base-16 integer parsing
			let base = 10;
			if ( headerObject.encoding === 'hex' ) {

				base = 16;

			}

			const result = new headerObject.__array( lengthOfTheResult );
			let resultIndex = 0;
			let parsingFunction = parseInt;
			if ( headerObject.__array === Float32Array || headerObject.__array === Float64Array ) {

				parsingFunction = parseFloat;

			}

			for ( let i = start; i < end; i ++ ) {

				value = data[ i ];
				//if value is not a space
				if ( ( value < 9 || value > 13 ) && value !== 32 ) {

					number += String.fromCharCode( value );

				} else {

					if ( number !== '' ) {

						result[ resultIndex ] = parsingFunction( number, base );
						resultIndex ++;

					}

					number = '';

				}

			}

			// flush the trailing token, if any
			if ( number !== '' ) {

				result[ resultIndex ] = parsingFunction( number, base );
				resultIndex ++;

			}

			return result;

		}

		const _bytes = scan( 'uchar', data.byteLength );
		const _length = _bytes.length;
		let _header = null;
		let _data_start = 0;
		let i;

		// The header ends at the first blank line (two consecutive \n).
		for ( i = 1; i < _length; i ++ ) {

			if ( _bytes[ i - 1 ] == 10 && _bytes[ i ] == 10 ) {

				// we found two line breaks in a row
				// now we know what the header is
				// NOTE(review): the end bound i - 2 drops the byte before the
				// first newline (likely targeting a trailing \r) — confirm
				// behavior for headers with plain \n line endings.
				_header = this.parseChars( _bytes, 0, i - 2 );
				// this is were the data starts
				_data_start = i + 1;
				break;

			}

		}

		// parse the header
		parseHeader( _header );

		_data = _bytes.subarray( _data_start ); // the data without header

		if ( headerObject.encoding.substring( 0, 2 ) === 'gz' ) {

			// we need to decompress the datastream
			// here we start the unzipping and get a typed Uint8Array back
			_data = fflate.gunzipSync( new Uint8Array( _data ) );

		} else if ( headerObject.encoding === 'ascii' || headerObject.encoding === 'text' || headerObject.encoding === 'txt' || headerObject.encoding === 'hex' ) {

			_data = parseDataAsText( _data );

		} else if ( headerObject.encoding === 'raw' ) {

			//we need to copy the array to create a new array buffer, else we retrieve the original arraybuffer with the header
			const _copy = new Uint8Array( _data.length );

			for ( let i = 0; i < _data.length; i ++ ) {

				_copy[ i ] = _data[ i ];

			}

			_data = _copy;

		}

		// .. let's use the underlying array buffer
		_data = _data.buffer;

		const volume = new Volume();
		volume.header = headerObject;
		volume.segmentation = this.segmentation;
		//
		// parse the (unzipped) data to a datastream of the correct type
		//
		volume.data = new headerObject.__array( _data );

		// get the min and max intensities
		const min_max = volume.computeMinMax();
		const min = min_max[ 0 ];
		const max = min_max[ 1 ];
		// attach the scalar range to the volume
		volume.windowLow = min;
		volume.windowHigh = max;

		// get the image dimensions
		volume.dimensions = [ headerObject.sizes[ 0 ], headerObject.sizes[ 1 ], headerObject.sizes[ 2 ] ];
		volume.xLength = volume.dimensions[ 0 ];
		volume.yLength = volume.dimensions[ 1 ];
		volume.zLength = volume.dimensions[ 2 ];

		// Identify axis order in the space-directions matrix from the header if possible.
		if ( headerObject.vectors ) {

			const xIndex = headerObject.vectors.findIndex( vector => vector[ 0 ] !== 0 );
			const yIndex = headerObject.vectors.findIndex( vector => vector[ 1 ] !== 0 );
			const zIndex = headerObject.vectors.findIndex( vector => vector[ 2 ] !== 0 );

			const axisOrder = [];

			// fall back to x/y/z when the directions are not axis-aligned
			if ( xIndex !== yIndex && xIndex !== zIndex && yIndex !== zIndex ) {

				axisOrder[ xIndex ] = 'x';
				axisOrder[ yIndex ] = 'y';
				axisOrder[ zIndex ] = 'z';

			} else {

				axisOrder[ 0 ] = 'x';
				axisOrder[ 1 ] = 'y';
				axisOrder[ 2 ] = 'z';

			}

			volume.axisOrder = axisOrder;

		} else {

			volume.axisOrder = [ 'x', 'y', 'z' ];

		}

		// spacing: per-axis length of the space-direction vectors
		const spacingX = new Vector3().fromArray( headerObject.vectors[ 0 ] ).length();
		const spacingY = new Vector3().fromArray( headerObject.vectors[ 1 ] ).length();
		const spacingZ = new Vector3().fromArray( headerObject.vectors[ 2 ] ).length();
		volume.spacing = [ spacingX, spacingY, spacingZ ];


		// Create IJKtoRAS matrix
		volume.matrix = new Matrix4();

		const transitionMatrix = new Matrix4();

		if ( headerObject.space === 'left-posterior-superior' ) {

			transitionMatrix.set(
				- 1, 0, 0, 0,
				0, - 1, 0, 0,
				0, 0, 1, 0,
				0, 0, 0, 1
			);

		} else if ( headerObject.space === 'left-anterior-superior' ) {

			transitionMatrix.set(
				1, 0, 0, 0,
				0, 1, 0, 0,
				0, 0, - 1, 0,
				0, 0, 0, 1
			);

		}


		if ( ! headerObject.vectors ) {

			volume.matrix.set(
				1, 0, 0, 0,
				0, 1, 0, 0,
				0, 0, 1, 0,
				0, 0, 0, 1 );

		} else {

			const v = headerObject.vectors;

			const ijk_to_transition = new Matrix4().set(
				v[ 0 ][ 0 ], v[ 1 ][ 0 ], v[ 2 ][ 0 ], 0,
				v[ 0 ][ 1 ], v[ 1 ][ 1 ], v[ 2 ][ 1 ], 0,
				v[ 0 ][ 2 ], v[ 1 ][ 2 ], v[ 2 ][ 2 ], 0,
				0, 0, 0, 1
			);

			const transition_to_ras = new Matrix4().multiplyMatrices( ijk_to_transition, transitionMatrix );

			volume.matrix = transition_to_ras;

		}

		volume.inverseMatrix = new Matrix4();
		volume.inverseMatrix.copy( volume.matrix ).invert();

		volume.RASDimensions = [
			Math.floor( volume.xLength * spacingX ),
			Math.floor( volume.yLength * spacingY ),
			Math.floor( volume.zLength * spacingZ )
		];

		// .. and set the default threshold
		// only if the threshold was not already set
		if ( volume.lowerThreshold === - Infinity ) {

			volume.lowerThreshold = min;

		}

		if ( volume.upperThreshold === Infinity ) {

			volume.upperThreshold = max;

		}

		return volume;

	}

	/**
	 * Decodes a byte range of `array` as an ASCII string.
	 *
	 * @param {Uint8Array} array
	 * @param {number} [start=0] - Inclusive start index.
	 * @param {number} [end=array.length] - Exclusive end index.
	 * @returns {string}
	 */
	parseChars( array, start, end ) {

		// without borders, use the whole array
		if ( start === undefined ) {

			start = 0;

		}

		if ( end === undefined ) {

			end = array.length;

		}

		let output = '';
		// create and append the chars
		let i = 0;
		for ( i = start; i < end; ++ i ) {

			output += String.fromCharCode( array[ i ] );

		}

		return output;

	}

}
/**
 * Per-field parsers for NRRD header lines. Each function is invoked with
 * the header object as `this` and the field's raw value string; it stores
 * the parsed value on the header and returns it.
 */
const _fieldFunctions = {

	// Selects the typed-array constructor for the payload and records the
	// raw type string (as this.type).
	type: function ( data ) {

		const lookup = {
			'uchar': Uint8Array, 'unsigned char': Uint8Array, 'uint8': Uint8Array, 'uint8_t': Uint8Array,
			'signed char': Int8Array, 'int8': Int8Array, 'int8_t': Int8Array,
			'short': Int16Array, 'short int': Int16Array, 'signed short': Int16Array,
			'signed short int': Int16Array, 'int16': Int16Array, 'int16_t': Int16Array,
			'ushort': Uint16Array, 'unsigned short': Uint16Array, 'unsigned short int': Uint16Array,
			'uint16': Uint16Array, 'uint16_t': Uint16Array,
			'int': Int32Array, 'signed int': Int32Array, 'int32': Int32Array, 'int32_t': Int32Array,
			'uint': Uint32Array, 'unsigned int': Uint32Array, 'uint32': Uint32Array, 'uint32_t': Uint32Array,
			'float': Float32Array,
			'double': Float64Array
		};

		const arrayType = lookup[ data ];

		if ( arrayType === undefined ) throw new Error( 'Unsupported NRRD data type: ' + data );

		this.__array = arrayType;

		return this.type = data;

	},

	endian: function ( data ) {

		return this.endian = data;

	},

	encoding: function ( data ) {

		return this.encoding = data;

	},

	// Stored as this.dim (not this.dimension).
	dimension: function ( data ) {

		return this.dim = parseInt( data, 10 );

	},

	// Whitespace-separated per-axis voxel counts.
	sizes: function ( data ) {

		return this.sizes = data.split( /\s+/ ).map( ( token ) => parseInt( token, 10 ) );

	},

	space: function ( data ) {

		return this.space = data;

	},

	// Kept as an array of strings, e.g. '(1,2,3)' → [ '1', '2', '3' ].
	'space origin': function ( data ) {

		return this.space_origin = data.split( '(' )[ 1 ].split( ')' )[ 0 ].split( ',' );

	},

	// Parses '(a,b,c) (d,e,f) ...' into arrays of floats (this.vectors).
	'space directions': function ( data ) {

		const groups = data.match( /\(.*?\)/g );

		return this.vectors = groups.map( ( group ) =>
			group.slice( 1, - 1 ).split( /,/ ).map( ( token ) => parseFloat( token ) )
		);

	},

	// Whitespace-separated per-axis spacings.
	spacings: function ( data ) {

		return this.spacings = data.split( /\s+/ ).map( ( token ) => parseFloat( token ) );

	}

};
export { NRRDLoader };

View File

@ -0,0 +1,55 @@
import {
Vector2
} from '/static/javascript/three/build/three.module.js';
/**
 * Normal map shader
 * - compute normals from heightmap
 *
 * Produces a normal map from a height texture: each fragment samples the
 * height at the texel and its +x / +y neighbours and encodes the normalized
 * difference vector into RGB (remapped from [-1, 1] to [0, 1]).
 */
const NormalMapShader = {

	name: 'NormalMapShader',

	uniforms: {

		// source height field (only the x/red channel is read)
		'heightMap': { value: null },

		// heightMap dimensions in texels; used to step to adjacent samples
		'resolution': { value: new Vector2( 512, 512 ) },

		// NOTE(review): 'scale' is declared but not referenced by the GLSL
		// below — confirm whether callers still set it before removing.
		'scale': { value: new Vector2( 1, 1 ) },

		// z component of the un-normalized normal; smaller values => steeper normals
		'height': { value: 0.05 }

	},

	vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,

	fragmentShader: /* glsl */`
uniform float height;
uniform vec2 resolution;
uniform sampler2D heightMap;
varying vec2 vUv;
void main() {
float val = texture2D( heightMap, vUv ).x;
float valU = texture2D( heightMap, vUv + vec2( 1.0 / resolution.x, 0.0 ) ).x;
float valV = texture2D( heightMap, vUv + vec2( 0.0, 1.0 / resolution.y ) ).x;
gl_FragColor = vec4( ( 0.5 * normalize( vec3( val - valU, val - valV, height ) ) + 0.5 ), 1.0 );
}`

};

export { NormalMapShader };

View File

@ -0,0 +1,905 @@
import {
BufferGeometry,
FileLoader,
Float32BufferAttribute,
Group,
LineBasicMaterial,
LineSegments,
Loader,
Material,
Mesh,
MeshPhongMaterial,
Points,
PointsMaterial,
Vector3,
Color
} from '/static/javascript/three/build/three.module.js';
// Line-classification patterns for the OBJ grammar.

// o object_name | g group_name
const _object_pattern = /^[og]\s*(.+)?/;
// mtllib file_reference
const _material_library_pattern = /^mtllib /;
// usemtl material_name
const _material_use_pattern = /^usemtl /;
// usemap map_name
const _map_use_pattern = /^usemap /;
// whitespace run separating fields on v/vn/vt/f lines
const _face_vertex_data_separator_pattern = /\s+/;

// Module-level scratch objects, reused to avoid per-face allocations.
const _vA = new Vector3();
const _vB = new Vector3();
const _vC = new Vector3();

const _ab = new Vector3();
const _cb = new Vector3();

const _color = new Color();
/**
 * Creates the mutable state used while parsing one OBJ text.
 *
 * Holds the global vertex/normal/color/uv pools plus the list of objects
 * (o/g groups) built so far; `object` is the group currently being filled.
 * The parse*Index helpers convert OBJ's 1-based (and negative, relative)
 * indices into 0-based offsets into the flat pools.
 */
function ParserState() {

	const state = {

		objects: [],
		object: {},

		vertices: [],
		normals: [],
		colors: [],
		uvs: [],

		materials: {},
		materialLibraries: [],

		// Begin a new o/g group; finalizes the previous one and inherits its
		// active material where the OBJ spec requires it.
		startObject: function ( name, fromDeclaration ) {

			// If the current object (initial from reset) is not from a g/o declaration in the parsed
			// file. We need to use it for the first parsed g/o to keep things in sync.
			if ( this.object && this.object.fromDeclaration === false ) {

				this.object.name = name;
				this.object.fromDeclaration = ( fromDeclaration !== false );
				return;

			}

			const previousMaterial = ( this.object && typeof this.object.currentMaterial === 'function' ? this.object.currentMaterial() : undefined );

			if ( this.object && typeof this.object._finalize === 'function' ) {

				this.object._finalize( true );

			}

			this.object = {
				name: name || '',
				fromDeclaration: ( fromDeclaration !== false ),

				geometry: {
					vertices: [],
					normals: [],
					colors: [],
					uvs: [],
					hasUVIndices: false
				},
				materials: [],
				smooth: true,

				// Record a usemtl declaration; returns the new material entry.
				startMaterial: function ( name, libraries ) {

					const previous = this._finalize( false );

					// New usemtl declaration overwrites an inherited material, except if faces were declared
					// after the material, then it must be preserved for proper MultiMaterial continuation.
					if ( previous && ( previous.inherited || previous.groupCount <= 0 ) ) {

						this.materials.splice( previous.index, 1 );

					}

					const material = {
						index: this.materials.length,
						name: name || '',
						mtllib: ( Array.isArray( libraries ) && libraries.length > 0 ? libraries[ libraries.length - 1 ] : '' ),
						smooth: ( previous !== undefined ? previous.smooth : this.smooth ),
						groupStart: ( previous !== undefined ? previous.groupEnd : 0 ),
						groupEnd: - 1,
						groupCount: - 1,
						inherited: false,

						clone: function ( index ) {

							const cloned = {
								index: ( typeof index === 'number' ? index : this.index ),
								name: this.name,
								mtllib: this.mtllib,
								smooth: this.smooth,
								groupStart: 0,
								groupEnd: - 1,
								groupCount: - 1,
								inherited: false
							};
							cloned.clone = this.clone.bind( cloned );
							return cloned;

						}
					};

					this.materials.push( material );

					return material;

				},

				currentMaterial: function () {

					if ( this.materials.length > 0 ) {

						return this.materials[ this.materials.length - 1 ];

					}

					return undefined;

				},

				// Close the open material group; when `end` is true, also prune
				// unused materials and guarantee at least one entry.
				_finalize: function ( end ) {

					const lastMultiMaterial = this.currentMaterial();
					if ( lastMultiMaterial && lastMultiMaterial.groupEnd === - 1 ) {

						lastMultiMaterial.groupEnd = this.geometry.vertices.length / 3;
						lastMultiMaterial.groupCount = lastMultiMaterial.groupEnd - lastMultiMaterial.groupStart;
						lastMultiMaterial.inherited = false;

					}

					// Ignore objects tail materials if no face declarations followed them before a new o/g started.
					if ( end && this.materials.length > 1 ) {

						for ( let mi = this.materials.length - 1; mi >= 0; mi -- ) {

							if ( this.materials[ mi ].groupCount <= 0 ) {

								this.materials.splice( mi, 1 );

							}

						}

					}

					// Guarantee at least one empty material, this makes the creation later more straight forward.
					if ( end && this.materials.length === 0 ) {

						this.materials.push( {
							name: '',
							smooth: this.smooth
						} );

					}

					return lastMultiMaterial;

				}
			};

			// Inherit previous objects material.
			// Spec tells us that a declared material must be set to all objects until a new material is declared.
			// If a usemtl declaration is encountered while this new object is being parsed, it will
			// overwrite the inherited material. Exception being that there was already face declarations
			// to the inherited material, then it will be preserved for proper MultiMaterial continuation.
			if ( previousMaterial && previousMaterial.name && typeof previousMaterial.clone === 'function' ) {

				const declared = previousMaterial.clone( 0 );
				declared.inherited = true;
				this.object.materials.push( declared );

			}

			this.objects.push( this.object );

		},

		finalize: function () {

			if ( this.object && typeof this.object._finalize === 'function' ) {

				this.object._finalize( true );

			}

		},

		// OBJ indices are 1-based; negative indices are relative to the end of
		// the pool. Result is a 0-based offset into the flat (stride 3) array.
		parseVertexIndex: function ( value, len ) {

			const index = parseInt( value, 10 );
			return ( index >= 0 ? index - 1 : index + len / 3 ) * 3;

		},

		parseNormalIndex: function ( value, len ) {

			const index = parseInt( value, 10 );
			return ( index >= 0 ? index - 1 : index + len / 3 ) * 3;

		},

		// Same as above but for the stride-2 uv pool.
		parseUVIndex: function ( value, len ) {

			const index = parseInt( value, 10 );
			return ( index >= 0 ? index - 1 : index + len / 2 ) * 2;

		},

		addVertex: function ( a, b, c ) {

			const src = this.vertices;
			const dst = this.object.geometry.vertices;

			dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] );
			dst.push( src[ b + 0 ], src[ b + 1 ], src[ b + 2 ] );
			dst.push( src[ c + 0 ], src[ c + 1 ], src[ c + 2 ] );

		},

		addVertexPoint: function ( a ) {

			const src = this.vertices;
			const dst = this.object.geometry.vertices;

			dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] );

		},

		addVertexLine: function ( a ) {

			const src = this.vertices;
			const dst = this.object.geometry.vertices;

			dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] );

		},

		addNormal: function ( a, b, c ) {

			const src = this.normals;
			const dst = this.object.geometry.normals;

			dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] );
			dst.push( src[ b + 0 ], src[ b + 1 ], src[ b + 2 ] );
			dst.push( src[ c + 0 ], src[ c + 1 ], src[ c + 2 ] );

		},

		// Flat normal computed from the triangle winding, used when the file
		// supplies no vn data for a face.
		addFaceNormal: function ( a, b, c ) {

			const src = this.vertices;
			const dst = this.object.geometry.normals;

			_vA.fromArray( src, a );
			_vB.fromArray( src, b );
			_vC.fromArray( src, c );

			_cb.subVectors( _vC, _vB );
			_ab.subVectors( _vA, _vB );
			_cb.cross( _ab );

			_cb.normalize();

			dst.push( _cb.x, _cb.y, _cb.z );
			dst.push( _cb.x, _cb.y, _cb.z );
			dst.push( _cb.x, _cb.y, _cb.z );

		},

		// Colors are optional; undefined pool entries (placeholders) are skipped.
		addColor: function ( a, b, c ) {

			const src = this.colors;
			const dst = this.object.geometry.colors;

			if ( src[ a ] !== undefined ) dst.push( src[ a + 0 ], src[ a + 1 ], src[ a + 2 ] );
			if ( src[ b ] !== undefined ) dst.push( src[ b + 0 ], src[ b + 1 ], src[ b + 2 ] );
			if ( src[ c ] !== undefined ) dst.push( src[ c + 0 ], src[ c + 1 ], src[ c + 2 ] );

		},

		addUV: function ( a, b, c ) {

			const src = this.uvs;
			const dst = this.object.geometry.uvs;

			dst.push( src[ a + 0 ], src[ a + 1 ] );
			dst.push( src[ b + 0 ], src[ b + 1 ] );
			dst.push( src[ c + 0 ], src[ c + 1 ] );

		},

		addDefaultUV: function () {

			const dst = this.object.geometry.uvs;

			dst.push( 0, 0 );
			dst.push( 0, 0 );
			dst.push( 0, 0 );

		},

		addUVLine: function ( a ) {

			const src = this.uvs;
			const dst = this.object.geometry.uvs;

			dst.push( src[ a + 0 ], src[ a + 1 ] );

		},

		// Emit one triangle: a/b/c are vertex refs, ua/ub/uc uv refs,
		// na/nb/nc normal refs (uv/normal refs may be undefined or '').
		addFace: function ( a, b, c, ua, ub, uc, na, nb, nc ) {

			const vLen = this.vertices.length;

			let ia = this.parseVertexIndex( a, vLen );
			let ib = this.parseVertexIndex( b, vLen );
			let ic = this.parseVertexIndex( c, vLen );

			this.addVertex( ia, ib, ic );
			this.addColor( ia, ib, ic );

			// normals

			if ( na !== undefined && na !== '' ) {

				const nLen = this.normals.length;

				ia = this.parseNormalIndex( na, nLen );
				ib = this.parseNormalIndex( nb, nLen );
				ic = this.parseNormalIndex( nc, nLen );

				this.addNormal( ia, ib, ic );

			} else {

				this.addFaceNormal( ia, ib, ic );

			}

			// uvs

			if ( ua !== undefined && ua !== '' ) {

				const uvLen = this.uvs.length;

				ia = this.parseUVIndex( ua, uvLen );
				ib = this.parseUVIndex( ub, uvLen );
				ic = this.parseUVIndex( uc, uvLen );

				this.addUV( ia, ib, ic );

				this.object.geometry.hasUVIndices = true;

			} else {

				// add placeholder values (for inconsistent face definitions)
				this.addDefaultUV();

			}

		},

		addPointGeometry: function ( vertices ) {

			this.object.geometry.type = 'Points';

			const vLen = this.vertices.length;

			for ( let vi = 0, l = vertices.length; vi < l; vi ++ ) {

				const index = this.parseVertexIndex( vertices[ vi ], vLen );

				this.addVertexPoint( index );
				this.addColor( index );

			}

		},

		addLineGeometry: function ( vertices, uvs ) {

			this.object.geometry.type = 'Line';

			const vLen = this.vertices.length;
			const uvLen = this.uvs.length;

			for ( let vi = 0, l = vertices.length; vi < l; vi ++ ) {

				this.addVertexLine( this.parseVertexIndex( vertices[ vi ], vLen ) );

			}

			for ( let uvi = 0, l = uvs.length; uvi < l; uvi ++ ) {

				this.addUVLine( this.parseUVIndex( uvs[ uvi ], uvLen ) );

			}

		}

	};

	// Seed with an implicit (non-declared) object so data before any o/g
	// line has somewhere to go.
	state.startObject( '', false );

	return state;

}
//
/**
 * Loader for the Wavefront OBJ text format. Parses v/vn/vt/f/l/p records plus
 * o/g grouping, usemtl/mtllib material references and s smoothing flags, and
 * builds a Group of Mesh / LineSegments / Points objects.
 */
class OBJLoader extends Loader {

	constructor( manager ) {

		super( manager );

		// Optional MTLLoader.MaterialCreator set via setMaterials().
		this.materials = null;

	}

	/**
	 * Loads the OBJ text at `url` and invokes `onLoad` with the parsed Group.
	 * Parse errors are forwarded to `onError` when provided, otherwise logged.
	 */
	load( url, onLoad, onProgress, onError ) {

		const scope = this;

		const loader = new FileLoader( this.manager );
		loader.setPath( this.path );
		loader.setRequestHeader( this.requestHeader );
		loader.setWithCredentials( this.withCredentials );
		loader.load( url, function ( text ) {

			try {

				onLoad( scope.parse( text ) );

			} catch ( e ) {

				if ( onError ) {

					onError( e );

				} else {

					console.error( e );

				}

				scope.manager.itemError( url );

			}

		}, onProgress, onError );

	}

	// Chainable; supplies pre-loaded materials (e.g. from MTLLoader).
	setMaterials( materials ) {

		this.materials = materials;

		return this;

	}

	/**
	 * Parses OBJ text and returns a Group whose children carry the geometry.
	 * The Group also exposes `materialLibraries` (mtllib references).
	 */
	parse( text ) {

		const state = new ParserState();

		if ( text.indexOf( '\r\n' ) !== - 1 ) {

			// This is faster than String.split with regex that splits on both
			text = text.replace( /\r\n/g, '\n' );

		}

		if ( text.indexOf( '\\\n' ) !== - 1 ) {

			// join lines separated by a line continuation character (\)
			text = text.replace( /\\\n/g, '' );

		}

		const lines = text.split( '\n' );
		let result = [];

		for ( let i = 0, l = lines.length; i < l; i ++ ) {

			const line = lines[ i ].trimStart();

			if ( line.length === 0 ) continue;

			const lineFirstChar = line.charAt( 0 );

			// @todo invoke passed in handler if any
			if ( lineFirstChar === '#' ) continue; // skip comments

			if ( lineFirstChar === 'v' ) {

				const data = line.split( _face_vertex_data_separator_pattern );

				switch ( data[ 0 ] ) {

					case 'v':
						state.vertices.push(
							parseFloat( data[ 1 ] ),
							parseFloat( data[ 2 ] ),
							parseFloat( data[ 3 ] )
						);
						if ( data.length >= 7 ) {

							// vertex colors: x y z r g b
							_color.setRGB(
								parseFloat( data[ 4 ] ),
								parseFloat( data[ 5 ] ),
								parseFloat( data[ 6 ] )
							).convertSRGBToLinear();

							state.colors.push( _color.r, _color.g, _color.b );

						} else {

							// if no colors are defined, add placeholders so color and vertex indices match
							state.colors.push( undefined, undefined, undefined );

						}

						break;
					case 'vn':
						state.normals.push(
							parseFloat( data[ 1 ] ),
							parseFloat( data[ 2 ] ),
							parseFloat( data[ 3 ] )
						);
						break;
					case 'vt':
						state.uvs.push(
							parseFloat( data[ 1 ] ),
							parseFloat( data[ 2 ] )
						);
						break;

				}

			} else if ( lineFirstChar === 'f' ) {

				const lineData = line.slice( 1 ).trim();
				const vertexData = lineData.split( _face_vertex_data_separator_pattern );
				const faceVertices = [];

				// Parse the face vertex data into an easy to work with format

				for ( let j = 0, jl = vertexData.length; j < jl; j ++ ) {

					const vertex = vertexData[ j ];

					if ( vertex.length > 0 ) {

						const vertexParts = vertex.split( '/' );
						faceVertices.push( vertexParts );

					}

				}

				// Draw an edge between the first vertex and all subsequent vertices to form an n-gon

				const v1 = faceVertices[ 0 ];

				for ( let j = 1, jl = faceVertices.length - 1; j < jl; j ++ ) {

					const v2 = faceVertices[ j ];
					const v3 = faceVertices[ j + 1 ];

					state.addFace(
						v1[ 0 ], v2[ 0 ], v3[ 0 ],
						v1[ 1 ], v2[ 1 ], v3[ 1 ],
						v1[ 2 ], v2[ 2 ], v3[ 2 ]
					);

				}

			} else if ( lineFirstChar === 'l' ) {

				const lineParts = line.substring( 1 ).trim().split( ' ' );
				let lineVertices = [];
				const lineUVs = [];

				if ( line.indexOf( '/' ) === - 1 ) {

					lineVertices = lineParts;

				} else {

					for ( let li = 0, llen = lineParts.length; li < llen; li ++ ) {

						const parts = lineParts[ li ].split( '/' );

						if ( parts[ 0 ] !== '' ) lineVertices.push( parts[ 0 ] );
						if ( parts[ 1 ] !== '' ) lineUVs.push( parts[ 1 ] );

					}

				}

				state.addLineGeometry( lineVertices, lineUVs );

			} else if ( lineFirstChar === 'p' ) {

				const lineData = line.slice( 1 ).trim();
				const pointData = lineData.split( ' ' );

				state.addPointGeometry( pointData );

			} else if ( ( result = _object_pattern.exec( line ) ) !== null ) {

				// o object_name
				// or
				// g group_name

				// WORKAROUND: https://bugs.chromium.org/p/v8/issues/detail?id=2869
				// let name = result[ 0 ].slice( 1 ).trim();
				const name = ( ' ' + result[ 0 ].slice( 1 ).trim() ).slice( 1 );

				state.startObject( name );

			} else if ( _material_use_pattern.test( line ) ) {

				// material
				state.object.startMaterial( line.substring( 7 ).trim(), state.materialLibraries );

			} else if ( _material_library_pattern.test( line ) ) {

				// mtl file
				state.materialLibraries.push( line.substring( 7 ).trim() );

			} else if ( _map_use_pattern.test( line ) ) {

				// the line is parsed but ignored since the loader assumes textures are defined MTL files
				// (according to https://www.okino.com/conv/imp_wave.htm, 'usemap' is the old-style Wavefront texture reference method)
				console.warn( 'THREE.OBJLoader: Rendering identifier "usemap" not supported. Textures must be defined in MTL files.' );

			} else if ( lineFirstChar === 's' ) {

				result = line.split( ' ' );

				// smooth shading

				// @todo Handle files that have varying smooth values for a set of faces inside one geometry,
				// but does not define a usemtl for each face set.
				// This should be detected and a dummy material created (later MultiMaterial and geometry groups).
				// This requires some care to not create extra material on each smooth value for "normal" obj files.
				// where explicit usemtl defines geometry groups.
				// Example asset: examples/models/obj/cerberus/Cerberus.obj

				/*
				 * http://paulbourke.net/dataformats/obj/
				 *
				 * From chapter "Grouping" Syntax explanation "s group_number":
				 * "group_number is the smoothing group number. To turn off smoothing groups, use a value of 0 or off.
				 * Polygonal elements use group numbers to put elements in different smoothing groups. For free-form
				 * surfaces, smoothing groups are either turned on or off; there is no difference between values greater
				 * than 0."
				 */
				if ( result.length > 1 ) {

					const value = result[ 1 ].trim().toLowerCase();
					state.object.smooth = ( value !== '0' && value !== 'off' );

				} else {

					// ZBrush can produce "s" lines #11707
					state.object.smooth = true;

				}

				const material = state.object.currentMaterial();
				if ( material ) material.smooth = state.object.smooth;

			} else {

				// Handle null terminated files without exception
				if ( line === '\0' ) continue;

				console.warn( 'THREE.OBJLoader: Unexpected line: "' + line + '"' );

			}

		}

		state.finalize();

		const container = new Group();
		container.materialLibraries = [].concat( state.materialLibraries );

		const hasPrimitives = ! ( state.objects.length === 1 && state.objects[ 0 ].geometry.vertices.length === 0 );

		if ( hasPrimitives === true ) {

			for ( let i = 0, l = state.objects.length; i < l; i ++ ) {

				const object = state.objects[ i ];
				const geometry = object.geometry;
				const materials = object.materials;
				const isLine = ( geometry.type === 'Line' );
				const isPoints = ( geometry.type === 'Points' );
				let hasVertexColors = false;

				// Skip o/g line declarations that did not follow with any faces
				if ( geometry.vertices.length === 0 ) continue;

				const buffergeometry = new BufferGeometry();

				buffergeometry.setAttribute( 'position', new Float32BufferAttribute( geometry.vertices, 3 ) );

				if ( geometry.normals.length > 0 ) {

					buffergeometry.setAttribute( 'normal', new Float32BufferAttribute( geometry.normals, 3 ) );

				}

				if ( geometry.colors.length > 0 ) {

					hasVertexColors = true;
					buffergeometry.setAttribute( 'color', new Float32BufferAttribute( geometry.colors, 3 ) );

				}

				if ( geometry.hasUVIndices === true ) {

					buffergeometry.setAttribute( 'uv', new Float32BufferAttribute( geometry.uvs, 2 ) );

				}

				// Create materials

				const createdMaterials = [];

				for ( let mi = 0, miLen = materials.length; mi < miLen; mi ++ ) {

					const sourceMaterial = materials[ mi ];
					// cache key: equal name/smooth/vertex-color combinations share a material
					const materialHash = sourceMaterial.name + '_' + sourceMaterial.smooth + '_' + hasVertexColors;
					let material = state.materials[ materialHash ];

					if ( this.materials !== null ) {

						material = this.materials.create( sourceMaterial.name );

						// mtl etc. loaders probably can't create line materials correctly, copy properties to a line material.
						if ( isLine && material && ! ( material instanceof LineBasicMaterial ) ) {

							const materialLine = new LineBasicMaterial();
							Material.prototype.copy.call( materialLine, material );
							materialLine.color.copy( material.color );
							material = materialLine;

						} else if ( isPoints && material && ! ( material instanceof PointsMaterial ) ) {

							const materialPoints = new PointsMaterial( { size: 10, sizeAttenuation: false } );
							Material.prototype.copy.call( materialPoints, material );
							materialPoints.color.copy( material.color );
							materialPoints.map = material.map;
							material = materialPoints;

						}

					}

					if ( material === undefined ) {

						if ( isLine ) {

							material = new LineBasicMaterial();

						} else if ( isPoints ) {

							material = new PointsMaterial( { size: 1, sizeAttenuation: false } );

						} else {

							material = new MeshPhongMaterial();

						}

						material.name = sourceMaterial.name;
						material.flatShading = sourceMaterial.smooth ? false : true;
						material.vertexColors = hasVertexColors;

						state.materials[ materialHash ] = material;

					}

					createdMaterials.push( material );

				}

				// Create mesh

				let mesh;

				if ( createdMaterials.length > 1 ) {

					// multi-material: one geometry group per source material
					for ( let mi = 0, miLen = materials.length; mi < miLen; mi ++ ) {

						const sourceMaterial = materials[ mi ];
						buffergeometry.addGroup( sourceMaterial.groupStart, sourceMaterial.groupCount, mi );

					}

					if ( isLine ) {

						mesh = new LineSegments( buffergeometry, createdMaterials );

					} else if ( isPoints ) {

						mesh = new Points( buffergeometry, createdMaterials );

					} else {

						mesh = new Mesh( buffergeometry, createdMaterials );

					}

				} else {

					if ( isLine ) {

						mesh = new LineSegments( buffergeometry, createdMaterials[ 0 ] );

					} else if ( isPoints ) {

						mesh = new Points( buffergeometry, createdMaterials[ 0 ] );

					} else {

						mesh = new Mesh( buffergeometry, createdMaterials[ 0 ] );

					}

				}

				mesh.name = object.name;

				container.add( mesh );

			}

		} else {

			// if there is only the default parser state object with no geometry data, interpret data as point cloud
			if ( state.vertices.length > 0 ) {

				const material = new PointsMaterial( { size: 1, sizeAttenuation: false } );

				const buffergeometry = new BufferGeometry();

				buffergeometry.setAttribute( 'position', new Float32BufferAttribute( state.vertices, 3 ) );

				if ( state.colors.length > 0 && state.colors[ 0 ] !== undefined ) {

					buffergeometry.setAttribute( 'color', new Float32BufferAttribute( state.colors, 3 ) );
					material.vertexColors = true;

				}

				const points = new Points( buffergeometry, material );
				container.add( points );

			}

		}

		return container;

	}

}
export { OBJLoader };

View File

@ -0,0 +1,110 @@
import { Object3D, Sphere, Box3 } from '/static/javascript/three/build/three.module.js';
import { XRHandMeshModel } from './XRHandMeshModel.js';
// Radius of the touch-test sphere around the pointing joint
// (presumably metres — WebXR convention; confirm).
const TOUCH_RADIUS = 0.01;
// XRHand joint used as the pointer tip.
const POINTING_JOINT = 'index-finger-tip';
/**
 * Visual hand model driven by a WebXR hand-tracking controller.
 * Attaches an XRHandMeshModel when the controller connects with hand data
 * and offers simple index-finger-tip intersection helpers for UI buttons.
 */
class OculusHandModel extends Object3D {

	constructor( controller, loader = null, onLoad = null ) {

		super();

		this.controller = controller;
		this.motionController = null;
		this.envMap = null;
		this.loader = loader;
		this.onLoad = onLoad;

		this.mesh = null;

		controller.addEventListener( 'connected', ( event ) => {

			const source = event.data;

			// Only react to hand-tracking sources, and only once.
			if ( ! source.hand || this.motionController ) return;

			this.xrInputSource = source;
			this.motionController = new XRHandMeshModel( this, controller, this.path, source.handedness, this.loader, this.onLoad );

		} );

		controller.addEventListener( 'disconnected', () => {

			// Drop child meshes and forget the motion controller.
			this.clear();
			this.motionController = null;

		} );

	}

	// Keep the hand mesh joints in sync with the tracked pose each frame.
	updateMatrixWorld( force ) {

		super.updateMatrixWorld( force );

		this.motionController?.updateMesh();

	}

	// World position of the pointing joint, or null when it is not tracked.
	getPointerPosition() {

		const tip = this.controller.joints[ POINTING_JOINT ];

		return tip ? tip.position : null;

	}

	// True when the pointing joint's touch sphere overlaps boxObject's AABB.
	intersectBoxObject( boxObject ) {

		const pointerPosition = this.getPointerPosition();

		if ( ! pointerPosition ) return false;

		const touchSphere = new Sphere( pointerPosition, TOUCH_RADIUS );

		return touchSphere.intersectsBox( new Box3().setFromObject( boxObject ) );

	}

	// Drive a button object's press/clear/whilePressed callbacks.
	checkButton( button ) {

		if ( this.intersectBoxObject( button ) ) {

			button.onPress();

		} else {

			button.onClear();

		}

		if ( button.isPressed() ) button.whilePressed();

	}

}
export { OculusHandModel };

View File

@ -0,0 +1,413 @@
import * as THREE from '/static/javascript/three/build/three.module.js';
// Thumb-to-index distances bounding the pinch animation
// (presumably metres — WebXR convention; confirm).
const PINCH_MAX = 0.05;        // at/above this the pointer is fully "open"
const PINCH_THRESHOLD = 0.02;  // at/below this a pinch is registered
const PINCH_MIN = 0.01;        // mapped to a fully-closed pinch

// Pointer mesh animation ranges.
const POINTER_ADVANCE_MAX = 0.02;
const POINTER_OPACITY_MAX = 1;
const POINTER_OPACITY_MIN = 0.4;

// Pointer geometry: a thin cone-like tube with a hemispherical rear cap.
const POINTER_FRONT_RADIUS = 0.002;
const POINTER_REAR_RADIUS = 0.01;
const POINTER_REAR_RADIUS_MIN = 0.003;
const POINTER_LENGTH = 0.035;
const POINTER_SEGMENTS = 16;   // vertices per ring
const POINTER_RINGS = 12;      // rings forming the rear hemisphere
const POINTER_HEMISPHERE_ANGLE = 110;

const YAXIS = /* @__PURE__ */ new THREE.Vector3( 0, 1, 0 );
const ZAXIS = /* @__PURE__ */ new THREE.Vector3( 0, 0, 1 );

// Cursor sphere shown where the pointer ray hits (or at max distance).
const CURSOR_RADIUS = 0.02;
const CURSOR_MAX_DISTANCE = 1.5;
/**
 * Pinch-driven ray pointer for WebXR hand tracking.
 *
 * Builds a procedural pointer mesh (front ring + rear hemisphere) positioned
 * midway between thumb tip and index tip, shrinks/brightens it as the pinch
 * closes, and exposes a Raycaster aligned with the pointer for UI hit tests.
 */
class OculusHandPointerModel extends THREE.Object3D {

	constructor( hand, controller ) {

		super();

		this.hand = hand;
		this.controller = controller;

		// Unused
		this.motionController = null;
		this.envMap = null;

		this.mesh = null;

		this.pointerGeometry = null;
		this.pointerMesh = null;
		this.pointerObject = null;

		this.pinched = false;
		this.attached = false;

		this.cursorObject = null;

		this.raycaster = null;

		// Bound once so the same references can be removed in dispose().
		this._onConnected = this._onConnected.bind( this );
		this._onDisconnected = this._onDisconnected.bind( this );
		this.hand.addEventListener( 'connected', this._onConnected );
		this.hand.addEventListener( 'disconnected', this._onDisconnected );

	}

	_onConnected( event ) {

		const xrInputSource = event.data;

		if ( xrInputSource.hand ) {

			this.visible = true;
			this.xrInputSource = xrInputSource;

			this.createPointer();

		}

	}

	_onDisconnected() {

		this.visible = false;
		this.xrInputSource = null;

		// Release GPU resources before dropping children.
		if ( this.pointerGeometry ) this.pointerGeometry.dispose();
		if ( this.pointerMesh && this.pointerMesh.material ) this.pointerMesh.material.dispose();

		this.clear();

	}

	// Writes one ring of POINTER_SEGMENTS vertices (rotated copies of
	// baseVector around Z) into the flat position array at ringIndex.
	_drawVerticesRing( vertices, baseVector, ringIndex ) {

		const segmentVector = baseVector.clone();

		for ( let i = 0; i < POINTER_SEGMENTS; i ++ ) {

			segmentVector.applyAxisAngle( ZAXIS, ( Math.PI * 2 ) / POINTER_SEGMENTS );
			const vid = ringIndex * POINTER_SEGMENTS + i;

			vertices[ 3 * vid ] = segmentVector.x;
			vertices[ 3 * vid + 1 ] = segmentVector.y;
			vertices[ 3 * vid + 2 ] = segmentVector.z;

		}

	}

	// Rebuilds all pointer vertices for the given rear-cap radius; called
	// every frame while pinching to animate the pointer shape.
	_updatePointerVertices( rearRadius ) {

		const vertices = this.pointerGeometry.attributes.position.array;

		// first ring for front face
		const frontFaceBase = new THREE.Vector3(
			POINTER_FRONT_RADIUS,
			0,
			- 1 * ( POINTER_LENGTH - rearRadius )
		);
		this._drawVerticesRing( vertices, frontFaceBase, 0 );

		// rings for rear hemisphere
		const rearBase = new THREE.Vector3(
			Math.sin( ( Math.PI * POINTER_HEMISPHERE_ANGLE ) / 180 ) * rearRadius,
			Math.cos( ( Math.PI * POINTER_HEMISPHERE_ANGLE ) / 180 ) * rearRadius,
			0
		);
		for ( let i = 0; i < POINTER_RINGS; i ++ ) {

			this._drawVerticesRing( vertices, rearBase, i + 1 );
			rearBase.applyAxisAngle(
				YAXIS,
				( Math.PI * POINTER_HEMISPHERE_ANGLE ) / 180 / ( POINTER_RINGS * - 2 )
			);

		}

		// front and rear face center vertices
		const frontCenterIndex = POINTER_SEGMENTS * ( 1 + POINTER_RINGS );
		const rearCenterIndex = POINTER_SEGMENTS * ( 1 + POINTER_RINGS ) + 1;
		const frontCenter = new THREE.Vector3(
			0,
			0,
			- 1 * ( POINTER_LENGTH - rearRadius )
		);
		vertices[ frontCenterIndex * 3 ] = frontCenter.x;
		vertices[ frontCenterIndex * 3 + 1 ] = frontCenter.y;
		vertices[ frontCenterIndex * 3 + 2 ] = frontCenter.z;
		const rearCenter = new THREE.Vector3( 0, 0, rearRadius );
		vertices[ rearCenterIndex * 3 ] = rearCenter.x;
		vertices[ rearCenterIndex * 3 + 1 ] = rearCenter.y;
		vertices[ rearCenterIndex * 3 + 2 ] = rearCenter.z;

		this.pointerGeometry.setAttribute(
			'position',
			new THREE.Float32BufferAttribute( vertices, 3 )
		);
		// verticesNeedUpdate = true;

	}

	// Builds the pointer mesh, its raycaster and the cursor sphere.
	createPointer() {

		let i, j;
		const vertices = new Array(
			( ( POINTER_RINGS + 1 ) * POINTER_SEGMENTS + 2 ) * 3
		).fill( 0 );
		// const vertices = [];
		const indices = [];
		this.pointerGeometry = new THREE.BufferGeometry();

		this.pointerGeometry.setAttribute(
			'position',
			new THREE.Float32BufferAttribute( vertices, 3 )
		);

		this._updatePointerVertices( POINTER_REAR_RADIUS );

		// construct faces to connect rings
		for ( i = 0; i < POINTER_RINGS; i ++ ) {

			for ( j = 0; j < POINTER_SEGMENTS - 1; j ++ ) {

				indices.push(
					i * POINTER_SEGMENTS + j,
					i * POINTER_SEGMENTS + j + 1,
					( i + 1 ) * POINTER_SEGMENTS + j
				);
				indices.push(
					i * POINTER_SEGMENTS + j + 1,
					( i + 1 ) * POINTER_SEGMENTS + j + 1,
					( i + 1 ) * POINTER_SEGMENTS + j
				);

			}

			// wrap-around quad closing each ring
			indices.push(
				( i + 1 ) * POINTER_SEGMENTS - 1,
				i * POINTER_SEGMENTS,
				( i + 2 ) * POINTER_SEGMENTS - 1
			);
			indices.push(
				i * POINTER_SEGMENTS,
				( i + 1 ) * POINTER_SEGMENTS,
				( i + 2 ) * POINTER_SEGMENTS - 1
			);

		}

		// construct front and rear face
		const frontCenterIndex = POINTER_SEGMENTS * ( 1 + POINTER_RINGS );
		const rearCenterIndex = POINTER_SEGMENTS * ( 1 + POINTER_RINGS ) + 1;

		for ( i = 0; i < POINTER_SEGMENTS - 1; i ++ ) {

			indices.push( frontCenterIndex, i + 1, i );
			indices.push(
				rearCenterIndex,
				i + POINTER_SEGMENTS * POINTER_RINGS,
				i + POINTER_SEGMENTS * POINTER_RINGS + 1
			);

		}

		indices.push( frontCenterIndex, 0, POINTER_SEGMENTS - 1 );
		indices.push(
			rearCenterIndex,
			POINTER_SEGMENTS * ( POINTER_RINGS + 1 ) - 1,
			POINTER_SEGMENTS * POINTER_RINGS
		);

		const material = new THREE.MeshBasicMaterial();
		material.transparent = true;
		material.opacity = POINTER_OPACITY_MIN;

		this.pointerGeometry.setIndex( indices );

		this.pointerMesh = new THREE.Mesh( this.pointerGeometry, material );

		this.pointerMesh.position.set( 0, 0, - 1 * POINTER_REAR_RADIUS );
		this.pointerObject = new THREE.Object3D();
		this.pointerObject.add( this.pointerMesh );

		this.raycaster = new THREE.Raycaster();

		// create cursor
		const cursorGeometry = new THREE.SphereGeometry( CURSOR_RADIUS, 10, 10 );
		const cursorMaterial = new THREE.MeshBasicMaterial();
		cursorMaterial.transparent = true;
		cursorMaterial.opacity = POINTER_OPACITY_MIN;

		this.cursorObject = new THREE.Mesh( cursorGeometry, cursorMaterial );
		this.pointerObject.add( this.cursorObject );

		this.add( this.pointerObject );

	}

	// Aligns the raycaster with the pointer's world transform (-Z forward).
	_updateRaycaster() {

		if ( this.raycaster ) {

			const pointerMatrix = this.pointerObject.matrixWorld;
			const tempMatrix = new THREE.Matrix4();
			tempMatrix.identity().extractRotation( pointerMatrix );
			this.raycaster.ray.origin.setFromMatrixPosition( pointerMatrix );
			this.raycaster.ray.direction.set( 0, 0, - 1 ).applyMatrix4( tempMatrix );

		}

	}

	// Repositions the pointer between thumb and index tips and animates its
	// shape/opacity as a function of pinch distance.
	_updatePointer() {

		this.pointerObject.visible = this.controller.visible;
		const indexTip = this.hand.joints[ 'index-finger-tip' ];
		const thumbTip = this.hand.joints[ 'thumb-tip' ];
		const distance = indexTip.position.distanceTo( thumbTip.position );
		const position = indexTip.position
			.clone()
			.add( thumbTip.position )
			.multiplyScalar( 0.5 );
		this.pointerObject.position.copy( position );
		this.pointerObject.quaternion.copy( this.controller.quaternion );

		this.pinched = distance <= PINCH_THRESHOLD;

		// pinchScale in [0,1] over [PINCH_MIN, PINCH_MAX];
		// focusScale in [0,1] over [PINCH_MIN, PINCH_THRESHOLD]
		const pinchScale = ( distance - PINCH_MIN ) / ( PINCH_MAX - PINCH_MIN );
		const focusScale = ( distance - PINCH_MIN ) / ( PINCH_THRESHOLD - PINCH_MIN );
		if ( pinchScale > 1 ) {

			this._updatePointerVertices( POINTER_REAR_RADIUS );
			this.pointerMesh.position.set( 0, 0, - 1 * POINTER_REAR_RADIUS );
			this.pointerMesh.material.opacity = POINTER_OPACITY_MIN;

		} else if ( pinchScale > 0 ) {

			const rearRadius =
				( POINTER_REAR_RADIUS - POINTER_REAR_RADIUS_MIN ) * pinchScale +
				POINTER_REAR_RADIUS_MIN;
			this._updatePointerVertices( rearRadius );
			if ( focusScale < 1 ) {

				this.pointerMesh.position.set(
					0,
					0,
					- 1 * rearRadius - ( 1 - focusScale ) * POINTER_ADVANCE_MAX
				);
				this.pointerMesh.material.opacity =
					POINTER_OPACITY_MIN +
					( 1 - focusScale ) * ( POINTER_OPACITY_MAX - POINTER_OPACITY_MIN );

			} else {

				this.pointerMesh.position.set( 0, 0, - 1 * rearRadius );
				this.pointerMesh.material.opacity = POINTER_OPACITY_MIN;

			}

		} else {

			this._updatePointerVertices( POINTER_REAR_RADIUS_MIN );
			this.pointerMesh.position.set(
				0,
				0,
				- 1 * POINTER_REAR_RADIUS_MIN - POINTER_ADVANCE_MAX
			);
			this.pointerMesh.material.opacity = POINTER_OPACITY_MAX;

		}

		this.cursorObject.material.opacity = this.pointerMesh.material.opacity;

	}

	updateMatrixWorld( force ) {

		super.updateMatrixWorld( force );
		if ( this.pointerGeometry ) {

			this._updatePointer();
			this._updateRaycaster();

		}

	}

	isPinched() {

		return this.pinched;

	}

	setAttached( attached ) {

		this.attached = attached;

	}

	isAttached() {

		return this.attached;

	}

	intersectObject( object, recursive = true ) {

		if ( this.raycaster ) {

			return this.raycaster.intersectObject( object, recursive );

		}

	}

	intersectObjects( objects, recursive = true ) {

		if ( this.raycaster ) {

			return this.raycaster.intersectObjects( objects, recursive );

		}

	}

	// Casts against `objects` (unless attached) and snaps the cursor to the
	// nearest hit, or to CURSOR_MAX_DISTANCE when nothing is hit.
	checkIntersections( objects, recursive = false ) {

		if ( this.raycaster && ! this.attached ) {

			const intersections = this.raycaster.intersectObjects( objects, recursive );
			const direction = new THREE.Vector3( 0, 0, - 1 );
			if ( intersections.length > 0 ) {

				const intersection = intersections[ 0 ];
				const distance = intersection.distance;
				this.cursorObject.position.copy( direction.multiplyScalar( distance ) );

			} else {

				this.cursorObject.position.copy( direction.multiplyScalar( CURSOR_MAX_DISTANCE ) );

			}

		}

	}

	// Places the cursor at an explicit distance along the pointer ray.
	setCursor( distance ) {

		const direction = new THREE.Vector3( 0, 0, - 1 );
		if ( this.raycaster && ! this.attached ) {

			this.cursorObject.position.copy( direction.multiplyScalar( distance ) );

		}

	}

	dispose() {

		this._onDisconnected();
		this.hand.removeEventListener( 'connected', this._onConnected );
		this.hand.removeEventListener( 'disconnected', this._onDisconnected );

	}

}
export { OculusHandPointerModel };

View File

@ -0,0 +1,654 @@
import {
AdditiveBlending,
Color,
DoubleSide,
HalfFloatType,
Matrix4,
MeshDepthMaterial,
NoBlending,
RGBADepthPacking,
ShaderMaterial,
UniformsUtils,
Vector2,
Vector3,
WebGLRenderTarget
} from '/static/javascript/three/build/three.module.js';
import { Pass, FullScreenQuad } from './Pass.js';
import { CopyShader } from '../shaders/CopyShader.js';
/**
 * Post-processing pass that draws a glowing outline around the meshes in
 * `selectedObjects`, distinguishing visible edges from edges hidden behind
 * other geometry.
 *
 * Pipeline (see render()): depth pre-pass of non-selected objects -> mask of
 * selected objects (depth-compared) -> downsample -> edge detection ->
 * separable blur at half and quarter resolution -> additive overlay onto the
 * read buffer.
 */
class OutlinePass extends Pass {
/**
 * @param {Vector2} resolution - Render resolution (defaults to 256x256 when omitted).
 * @param {Scene} scene - Scene to outline within.
 * @param {Camera} camera - Camera used for rendering.
 * @param {Array<Object3D>} [selectedObjects] - Objects to outline (defaults to []).
 */
constructor( resolution, scene, camera, selectedObjects ) {
super();
this.renderScene = scene;
this.renderCamera = camera;
this.selectedObjects = selectedObjects !== undefined ? selectedObjects : [];
this.visibleEdgeColor = new Color( 1, 1, 1 );
this.hiddenEdgeColor = new Color( 0.1, 0.04, 0.02 );
this.edgeGlow = 0.0;
this.usePatternTexture = false;
this.edgeThickness = 1.0;
this.edgeStrength = 3.0;
this.downSampleRatio = 2;
this.pulsePeriod = 0;
// Caches per-object visibility while the pass temporarily hides objects.
this._visibilityCache = new Map();
this.resolution = ( resolution !== undefined ) ? new Vector2( resolution.x, resolution.y ) : new Vector2( 256, 256 );
const resx = Math.round( this.resolution.x / this.downSampleRatio );
const resy = Math.round( this.resolution.y / this.downSampleRatio );
// Full-resolution mask of the selected objects.
this.renderTargetMaskBuffer = new WebGLRenderTarget( this.resolution.x, this.resolution.y );
this.renderTargetMaskBuffer.texture.name = 'OutlinePass.mask';
this.renderTargetMaskBuffer.texture.generateMipmaps = false;
this.depthMaterial = new MeshDepthMaterial();
this.depthMaterial.side = DoubleSide;
this.depthMaterial.depthPacking = RGBADepthPacking;
this.depthMaterial.blending = NoBlending;
this.prepareMaskMaterial = this.getPrepareMaskMaterial();
this.prepareMaskMaterial.side = DoubleSide;
// Specialize the shader for the camera's projection type.
this.prepareMaskMaterial.fragmentShader = replaceDepthToViewZ( this.prepareMaskMaterial.fragmentShader, this.renderCamera );
this.renderTargetDepthBuffer = new WebGLRenderTarget( this.resolution.x, this.resolution.y, { type: HalfFloatType } );
this.renderTargetDepthBuffer.texture.name = 'OutlinePass.depth';
this.renderTargetDepthBuffer.texture.generateMipmaps = false;
this.renderTargetMaskDownSampleBuffer = new WebGLRenderTarget( resx, resy, { type: HalfFloatType } );
this.renderTargetMaskDownSampleBuffer.texture.name = 'OutlinePass.depthDownSample';
this.renderTargetMaskDownSampleBuffer.texture.generateMipmaps = false;
this.renderTargetBlurBuffer1 = new WebGLRenderTarget( resx, resy, { type: HalfFloatType } );
this.renderTargetBlurBuffer1.texture.name = 'OutlinePass.blur1';
this.renderTargetBlurBuffer1.texture.generateMipmaps = false;
this.renderTargetBlurBuffer2 = new WebGLRenderTarget( Math.round( resx / 2 ), Math.round( resy / 2 ), { type: HalfFloatType } );
this.renderTargetBlurBuffer2.texture.name = 'OutlinePass.blur2';
this.renderTargetBlurBuffer2.texture.generateMipmaps = false;
this.edgeDetectionMaterial = this.getEdgeDetectionMaterial();
this.renderTargetEdgeBuffer1 = new WebGLRenderTarget( resx, resy, { type: HalfFloatType } );
this.renderTargetEdgeBuffer1.texture.name = 'OutlinePass.edge1';
this.renderTargetEdgeBuffer1.texture.generateMipmaps = false;
this.renderTargetEdgeBuffer2 = new WebGLRenderTarget( Math.round( resx / 2 ), Math.round( resy / 2 ), { type: HalfFloatType } );
this.renderTargetEdgeBuffer2.texture.name = 'OutlinePass.edge2';
this.renderTargetEdgeBuffer2.texture.generateMipmaps = false;
const MAX_EDGE_THICKNESS = 4;
const MAX_EDGE_GLOW = 4;
this.separableBlurMaterial1 = this.getSeperableBlurMaterial( MAX_EDGE_THICKNESS );
this.separableBlurMaterial1.uniforms[ 'texSize' ].value.set( resx, resy );
this.separableBlurMaterial1.uniforms[ 'kernelRadius' ].value = 1;
this.separableBlurMaterial2 = this.getSeperableBlurMaterial( MAX_EDGE_GLOW );
this.separableBlurMaterial2.uniforms[ 'texSize' ].value.set( Math.round( resx / 2 ), Math.round( resy / 2 ) );
this.separableBlurMaterial2.uniforms[ 'kernelRadius' ].value = MAX_EDGE_GLOW;
// Overlay material
this.overlayMaterial = this.getOverlayMaterial();
// copy material
const copyShader = CopyShader;
this.copyUniforms = UniformsUtils.clone( copyShader.uniforms );
this.materialCopy = new ShaderMaterial( {
uniforms: this.copyUniforms,
vertexShader: copyShader.vertexShader,
fragmentShader: copyShader.fragmentShader,
blending: NoBlending,
depthTest: false,
depthWrite: false
} );
this.enabled = true;
this.needsSwap = false;
this._oldClearColor = new Color();
this.oldClearAlpha = 1;
this.fsQuad = new FullScreenQuad( null );
this.tempPulseColor1 = new Color();
this.tempPulseColor2 = new Color();
this.textureMatrix = new Matrix4();
// Rewrites the DEPTH_TO_VIEW_Z placeholder in the mask shader to the
// projection-appropriate helper from three.js' packing chunk.
function replaceDepthToViewZ( string, camera ) {
const type = camera.isPerspectiveCamera ? 'perspective' : 'orthographic';
return string.replace( /DEPTH_TO_VIEW_Z/g, type + 'DepthToViewZ' );
}
}
// Frees all render targets, materials and the fullscreen quad.
dispose() {
this.renderTargetMaskBuffer.dispose();
this.renderTargetDepthBuffer.dispose();
this.renderTargetMaskDownSampleBuffer.dispose();
this.renderTargetBlurBuffer1.dispose();
this.renderTargetBlurBuffer2.dispose();
this.renderTargetEdgeBuffer1.dispose();
this.renderTargetEdgeBuffer2.dispose();
this.depthMaterial.dispose();
this.prepareMaskMaterial.dispose();
this.edgeDetectionMaterial.dispose();
this.separableBlurMaterial1.dispose();
this.separableBlurMaterial2.dispose();
this.overlayMaterial.dispose();
this.materialCopy.dispose();
this.fsQuad.dispose();
}
// Resizes all internal render targets; half/quarter-res buffers follow
// downSampleRatio.
setSize( width, height ) {
this.renderTargetMaskBuffer.setSize( width, height );
this.renderTargetDepthBuffer.setSize( width, height );
let resx = Math.round( width / this.downSampleRatio );
let resy = Math.round( height / this.downSampleRatio );
this.renderTargetMaskDownSampleBuffer.setSize( resx, resy );
this.renderTargetBlurBuffer1.setSize( resx, resy );
this.renderTargetEdgeBuffer1.setSize( resx, resy );
this.separableBlurMaterial1.uniforms[ 'texSize' ].value.set( resx, resy );
resx = Math.round( resx / 2 );
resy = Math.round( resy / 2 );
this.renderTargetBlurBuffer2.setSize( resx, resy );
this.renderTargetEdgeBuffer2.setSize( resx, resy );
this.separableBlurMaterial2.uniforms[ 'texSize' ].value.set( resx, resy );
}
// Hides (bVisible=false, caching prior visibility) or restores
// (bVisible=true) every mesh under the selected objects.
changeVisibilityOfSelectedObjects( bVisible ) {
const cache = this._visibilityCache;
function gatherSelectedMeshesCallBack( object ) {
if ( object.isMesh ) {
if ( bVisible === true ) {
object.visible = cache.get( object );
} else {
cache.set( object, object.visible );
object.visible = bVisible;
}
}
}
for ( let i = 0; i < this.selectedObjects.length; i ++ ) {
const selectedObject = this.selectedObjects[ i ];
selectedObject.traverse( gatherSelectedMeshesCallBack );
}
}
// Hides or restores everything in the scene that is NOT selected; points and
// lines are always hidden during the mask pass so they never affect outlines.
changeVisibilityOfNonSelectedObjects( bVisible ) {
const cache = this._visibilityCache;
const selectedMeshes = [];
function gatherSelectedMeshesCallBack( object ) {
if ( object.isMesh ) selectedMeshes.push( object );
}
for ( let i = 0; i < this.selectedObjects.length; i ++ ) {
const selectedObject = this.selectedObjects[ i ];
selectedObject.traverse( gatherSelectedMeshesCallBack );
}
function VisibilityChangeCallBack( object ) {
if ( object.isMesh || object.isSprite ) {
// only meshes and sprites are supported by OutlinePass
let bFound = false;
for ( let i = 0; i < selectedMeshes.length; i ++ ) {
const selectedObjectId = selectedMeshes[ i ].id;
if ( selectedObjectId === object.id ) {
bFound = true;
break;
}
}
if ( bFound === false ) {
const visibility = object.visible;
if ( bVisible === false || cache.get( object ) === true ) {
object.visible = bVisible;
}
cache.set( object, visibility );
}
} else if ( object.isPoints || object.isLine ) {
// the visibilty of points and lines is always set to false in order to
// not affect the outline computation
if ( bVisible === true ) {
object.visible = cache.get( object ); // restore
} else {
cache.set( object, object.visible );
object.visible = bVisible;
}
}
}
this.renderScene.traverse( VisibilityChangeCallBack );
}
// Builds the NDC-to-texture-space matrix (bias * projection * view) used to
// project world positions into the depth texture for the mask pass.
updateTextureMatrix() {
this.textureMatrix.set( 0.5, 0.0, 0.0, 0.5,
0.0, 0.5, 0.0, 0.5,
0.0, 0.0, 0.5, 0.5,
0.0, 0.0, 0.0, 1.0 );
this.textureMatrix.multiply( this.renderCamera.projectionMatrix );
this.textureMatrix.multiply( this.renderCamera.matrixWorldInverse );
}
// Executes the full outline pipeline; a no-op (except the renderToScreen
// copy) when no objects are selected. Composites additively into readBuffer.
render( renderer, writeBuffer, readBuffer, deltaTime, maskActive ) {
if ( this.selectedObjects.length > 0 ) {
renderer.getClearColor( this._oldClearColor );
this.oldClearAlpha = renderer.getClearAlpha();
const oldAutoClear = renderer.autoClear;
renderer.autoClear = false;
if ( maskActive ) renderer.state.buffers.stencil.setTest( false );
renderer.setClearColor( 0xffffff, 1 );
// Make selected objects invisible
this.changeVisibilityOfSelectedObjects( false );
const currentBackground = this.renderScene.background;
this.renderScene.background = null;
// 1. Draw Non Selected objects in the depth buffer
this.renderScene.overrideMaterial = this.depthMaterial;
renderer.setRenderTarget( this.renderTargetDepthBuffer );
renderer.clear();
renderer.render( this.renderScene, this.renderCamera );
// Make selected objects visible
this.changeVisibilityOfSelectedObjects( true );
this._visibilityCache.clear();
// Update Texture Matrix for Depth compare
this.updateTextureMatrix();
// Make non selected objects invisible, and draw only the selected objects, by comparing the depth buffer of non selected objects
this.changeVisibilityOfNonSelectedObjects( false );
this.renderScene.overrideMaterial = this.prepareMaskMaterial;
this.prepareMaskMaterial.uniforms[ 'cameraNearFar' ].value.set( this.renderCamera.near, this.renderCamera.far );
this.prepareMaskMaterial.uniforms[ 'depthTexture' ].value = this.renderTargetDepthBuffer.texture;
this.prepareMaskMaterial.uniforms[ 'textureMatrix' ].value = this.textureMatrix;
renderer.setRenderTarget( this.renderTargetMaskBuffer );
renderer.clear();
renderer.render( this.renderScene, this.renderCamera );
this.renderScene.overrideMaterial = null;
this.changeVisibilityOfNonSelectedObjects( true );
this._visibilityCache.clear();
this.renderScene.background = currentBackground;
// 2. Downsample to Half resolution
this.fsQuad.material = this.materialCopy;
this.copyUniforms[ 'tDiffuse' ].value = this.renderTargetMaskBuffer.texture;
renderer.setRenderTarget( this.renderTargetMaskDownSampleBuffer );
renderer.clear();
this.fsQuad.render( renderer );
this.tempPulseColor1.copy( this.visibleEdgeColor );
this.tempPulseColor2.copy( this.hiddenEdgeColor );
if ( this.pulsePeriod > 0 ) {
// Pulse edge brightness between 0.25 and 1 over pulsePeriod.
const scalar = ( 1 + 0.25 ) / 2 + Math.cos( performance.now() * 0.01 / this.pulsePeriod ) * ( 1.0 - 0.25 ) / 2;
this.tempPulseColor1.multiplyScalar( scalar );
this.tempPulseColor2.multiplyScalar( scalar );
}
// 3. Apply Edge Detection Pass
this.fsQuad.material = this.edgeDetectionMaterial;
this.edgeDetectionMaterial.uniforms[ 'maskTexture' ].value = this.renderTargetMaskDownSampleBuffer.texture;
this.edgeDetectionMaterial.uniforms[ 'texSize' ].value.set( this.renderTargetMaskDownSampleBuffer.width, this.renderTargetMaskDownSampleBuffer.height );
this.edgeDetectionMaterial.uniforms[ 'visibleEdgeColor' ].value = this.tempPulseColor1;
this.edgeDetectionMaterial.uniforms[ 'hiddenEdgeColor' ].value = this.tempPulseColor2;
renderer.setRenderTarget( this.renderTargetEdgeBuffer1 );
renderer.clear();
this.fsQuad.render( renderer );
// 4. Apply Blur on Half res
this.fsQuad.material = this.separableBlurMaterial1;
this.separableBlurMaterial1.uniforms[ 'colorTexture' ].value = this.renderTargetEdgeBuffer1.texture;
this.separableBlurMaterial1.uniforms[ 'direction' ].value = OutlinePass.BlurDirectionX;
this.separableBlurMaterial1.uniforms[ 'kernelRadius' ].value = this.edgeThickness;
renderer.setRenderTarget( this.renderTargetBlurBuffer1 );
renderer.clear();
this.fsQuad.render( renderer );
this.separableBlurMaterial1.uniforms[ 'colorTexture' ].value = this.renderTargetBlurBuffer1.texture;
this.separableBlurMaterial1.uniforms[ 'direction' ].value = OutlinePass.BlurDirectionY;
renderer.setRenderTarget( this.renderTargetEdgeBuffer1 );
renderer.clear();
this.fsQuad.render( renderer );
// Apply Blur on quarter res
this.fsQuad.material = this.separableBlurMaterial2;
this.separableBlurMaterial2.uniforms[ 'colorTexture' ].value = this.renderTargetEdgeBuffer1.texture;
this.separableBlurMaterial2.uniforms[ 'direction' ].value = OutlinePass.BlurDirectionX;
renderer.setRenderTarget( this.renderTargetBlurBuffer2 );
renderer.clear();
this.fsQuad.render( renderer );
this.separableBlurMaterial2.uniforms[ 'colorTexture' ].value = this.renderTargetBlurBuffer2.texture;
this.separableBlurMaterial2.uniforms[ 'direction' ].value = OutlinePass.BlurDirectionY;
renderer.setRenderTarget( this.renderTargetEdgeBuffer2 );
renderer.clear();
this.fsQuad.render( renderer );
// Blend it additively over the input texture
this.fsQuad.material = this.overlayMaterial;
this.overlayMaterial.uniforms[ 'maskTexture' ].value = this.renderTargetMaskBuffer.texture;
this.overlayMaterial.uniforms[ 'edgeTexture1' ].value = this.renderTargetEdgeBuffer1.texture;
this.overlayMaterial.uniforms[ 'edgeTexture2' ].value = this.renderTargetEdgeBuffer2.texture;
// NOTE(review): this.patternTexture is never set in the constructor —
// presumably callers assign it externally before enabling usePatternTexture.
this.overlayMaterial.uniforms[ 'patternTexture' ].value = this.patternTexture;
this.overlayMaterial.uniforms[ 'edgeStrength' ].value = this.edgeStrength;
this.overlayMaterial.uniforms[ 'edgeGlow' ].value = this.edgeGlow;
this.overlayMaterial.uniforms[ 'usePatternTexture' ].value = this.usePatternTexture;
if ( maskActive ) renderer.state.buffers.stencil.setTest( true );
renderer.setRenderTarget( readBuffer );
this.fsQuad.render( renderer );
renderer.setClearColor( this._oldClearColor, this.oldClearAlpha );
renderer.autoClear = oldAutoClear;
}
if ( this.renderToScreen ) {
this.fsQuad.material = this.materialCopy;
this.copyUniforms[ 'tDiffuse' ].value = readBuffer.texture;
renderer.setRenderTarget( null );
this.fsQuad.render( renderer );
}
}
// Material that writes, per selected-object fragment, whether it passes the
// depth test against the non-selected depth buffer (green channel).
getPrepareMaskMaterial() {
return new ShaderMaterial( {
uniforms: {
'depthTexture': { value: null },
'cameraNearFar': { value: new Vector2( 0.5, 0.5 ) },
'textureMatrix': { value: null }
},
vertexShader:
`#include <morphtarget_pars_vertex>
#include <skinning_pars_vertex>
varying vec4 projTexCoord;
varying vec4 vPosition;
uniform mat4 textureMatrix;
void main() {
#include <skinbase_vertex>
#include <begin_vertex>
#include <morphtarget_vertex>
#include <skinning_vertex>
#include <project_vertex>
vPosition = mvPosition;
vec4 worldPosition = vec4( transformed, 1.0 );
#ifdef USE_INSTANCING
worldPosition = instanceMatrix * worldPosition;
#endif
worldPosition = modelMatrix * worldPosition;
projTexCoord = textureMatrix * worldPosition;
}`,
fragmentShader:
`#include <packing>
varying vec4 vPosition;
varying vec4 projTexCoord;
uniform sampler2D depthTexture;
uniform vec2 cameraNearFar;
void main() {
float depth = unpackRGBAToDepth(texture2DProj( depthTexture, projTexCoord ));
float viewZ = - DEPTH_TO_VIEW_Z( depth, cameraNearFar.x, cameraNearFar.y );
float depthTest = (-vPosition.z > viewZ) ? 1.0 : 0.0;
gl_FragColor = vec4(0.0, depthTest, 1.0, 1.0);
}`
} );
}
// Material that turns the mask into colored edges via central differences,
// choosing the visible or hidden edge color from the mask's green channel.
getEdgeDetectionMaterial() {
return new ShaderMaterial( {
uniforms: {
'maskTexture': { value: null },
'texSize': { value: new Vector2( 0.5, 0.5 ) },
'visibleEdgeColor': { value: new Vector3( 1.0, 1.0, 1.0 ) },
'hiddenEdgeColor': { value: new Vector3( 1.0, 1.0, 1.0 ) },
},
vertexShader:
`varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader:
`varying vec2 vUv;
uniform sampler2D maskTexture;
uniform vec2 texSize;
uniform vec3 visibleEdgeColor;
uniform vec3 hiddenEdgeColor;
void main() {
vec2 invSize = 1.0 / texSize;
vec4 uvOffset = vec4(1.0, 0.0, 0.0, 1.0) * vec4(invSize, invSize);
vec4 c1 = texture2D( maskTexture, vUv + uvOffset.xy);
vec4 c2 = texture2D( maskTexture, vUv - uvOffset.xy);
vec4 c3 = texture2D( maskTexture, vUv + uvOffset.yw);
vec4 c4 = texture2D( maskTexture, vUv - uvOffset.yw);
float diff1 = (c1.r - c2.r)*0.5;
float diff2 = (c3.r - c4.r)*0.5;
float d = length( vec2(diff1, diff2) );
float a1 = min(c1.g, c2.g);
float a2 = min(c3.g, c4.g);
float visibilityFactor = min(a1, a2);
vec3 edgeColor = 1.0 - visibilityFactor > 0.001 ? visibleEdgeColor : hiddenEdgeColor;
gl_FragColor = vec4(edgeColor, 1.0) * vec4(d);
}`
} );
}
// One-dimensional Gaussian blur material; direction is set per pass to blur
// horizontally then vertically. (Name spelling kept for API compatibility.)
getSeperableBlurMaterial( maxRadius ) {
return new ShaderMaterial( {
defines: {
'MAX_RADIUS': maxRadius,
},
uniforms: {
'colorTexture': { value: null },
'texSize': { value: new Vector2( 0.5, 0.5 ) },
'direction': { value: new Vector2( 0.5, 0.5 ) },
'kernelRadius': { value: 1.0 }
},
vertexShader:
`varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader:
`#include <common>
varying vec2 vUv;
uniform sampler2D colorTexture;
uniform vec2 texSize;
uniform vec2 direction;
uniform float kernelRadius;
float gaussianPdf(in float x, in float sigma) {
return 0.39894 * exp( -0.5 * x * x/( sigma * sigma))/sigma;
}
void main() {
vec2 invSize = 1.0 / texSize;
float sigma = kernelRadius/2.0;
float weightSum = gaussianPdf(0.0, sigma);
vec4 diffuseSum = texture2D( colorTexture, vUv) * weightSum;
vec2 delta = direction * invSize * kernelRadius/float(MAX_RADIUS);
vec2 uvOffset = delta;
for( int i = 1; i <= MAX_RADIUS; i ++ ) {
float x = kernelRadius * float(i) / float(MAX_RADIUS);
float w = gaussianPdf(x, sigma);
vec4 sample1 = texture2D( colorTexture, vUv + uvOffset);
vec4 sample2 = texture2D( colorTexture, vUv - uvOffset);
diffuseSum += ((sample1 + sample2) * w);
weightSum += (2.0 * w);
uvOffset += delta;
}
gl_FragColor = diffuseSum/weightSum;
}`
} );
}
// Material that additively composites the edge textures (and optional
// pattern) over the read buffer.
getOverlayMaterial() {
return new ShaderMaterial( {
uniforms: {
'maskTexture': { value: null },
'edgeTexture1': { value: null },
'edgeTexture2': { value: null },
'patternTexture': { value: null },
'edgeStrength': { value: 1.0 },
'edgeGlow': { value: 1.0 },
'usePatternTexture': { value: 0.0 }
},
vertexShader:
`varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader:
`varying vec2 vUv;
uniform sampler2D maskTexture;
uniform sampler2D edgeTexture1;
uniform sampler2D edgeTexture2;
uniform sampler2D patternTexture;
uniform float edgeStrength;
uniform float edgeGlow;
uniform bool usePatternTexture;
void main() {
vec4 edgeValue1 = texture2D(edgeTexture1, vUv);
vec4 edgeValue2 = texture2D(edgeTexture2, vUv);
vec4 maskColor = texture2D(maskTexture, vUv);
vec4 patternColor = texture2D(patternTexture, 6.0 * vUv);
float visibilityFactor = 1.0 - maskColor.g > 0.0 ? 1.0 : 0.5;
vec4 edgeValue = edgeValue1 + edgeValue2 * edgeGlow;
vec4 finalColor = edgeStrength * maskColor.r * edgeValue;
if(usePatternTexture)
finalColor += + visibilityFactor * (1.0 - maskColor.r) * (1.0 - patternColor.r);
gl_FragColor = finalColor;
}`,
blending: AdditiveBlending,
depthTest: false,
depthWrite: false,
transparent: true
} );
}
}
// Unit directions for the two separable-blur passes (horizontal / vertical).
OutlinePass.BlurDirectionX = new Vector2( 1.0, 0.0 );
OutlinePass.BlurDirectionY = new Vector2( 0.0, 1.0 );
export { OutlinePass };

View File

@ -0,0 +1,97 @@
import {
ColorManagement,
RawShaderMaterial,
UniformsUtils,
LinearToneMapping,
ReinhardToneMapping,
CineonToneMapping,
AgXToneMapping,
ACESFilmicToneMapping,
NeutralToneMapping,
SRGBTransfer
} from '/static/javascript/three/build/three.module.js';
import { Pass, FullScreenQuad } from './Pass.js';
import { OutputShader } from '../shaders/OutputShader.js';
/**
 * Final post-processing pass: applies the renderer's tone mapping and output
 * color space conversion to the composed frame, using OutputShader.
 */
class OutputPass extends Pass {
constructor() {
super();
//
const shader = OutputShader;
this.uniforms = UniformsUtils.clone( shader.uniforms );
this.material = new RawShaderMaterial( {
name: shader.name,
uniforms: this.uniforms,
vertexShader: shader.vertexShader,
fragmentShader: shader.fragmentShader
} );
this.fsQuad = new FullScreenQuad( this.material );
// internal cache
this._outputColorSpace = null;
this._toneMapping = null;
}
// Renders readBuffer through the output shader to the screen or writeBuffer,
// recompiling shader defines whenever the renderer's output color space or
// tone mapping mode changed since the last frame.
render( renderer, writeBuffer, readBuffer/*, deltaTime, maskActive */ ) {
this.uniforms[ 'tDiffuse' ].value = readBuffer.texture;
this.uniforms[ 'toneMappingExposure' ].value = renderer.toneMappingExposure;
// rebuild defines if required
if ( this._outputColorSpace !== renderer.outputColorSpace || this._toneMapping !== renderer.toneMapping ) {
this._outputColorSpace = renderer.outputColorSpace;
this._toneMapping = renderer.toneMapping;
this.material.defines = {};
if ( ColorManagement.getTransfer( this._outputColorSpace ) === SRGBTransfer ) this.material.defines.SRGB_TRANSFER = '';
if ( this._toneMapping === LinearToneMapping ) this.material.defines.LINEAR_TONE_MAPPING = '';
else if ( this._toneMapping === ReinhardToneMapping ) this.material.defines.REINHARD_TONE_MAPPING = '';
else if ( this._toneMapping === CineonToneMapping ) this.material.defines.CINEON_TONE_MAPPING = '';
else if ( this._toneMapping === ACESFilmicToneMapping ) this.material.defines.ACES_FILMIC_TONE_MAPPING = '';
else if ( this._toneMapping === AgXToneMapping ) this.material.defines.AGX_TONE_MAPPING = '';
else if ( this._toneMapping === NeutralToneMapping ) this.material.defines.NEUTRAL_TONE_MAPPING = '';
this.material.needsUpdate = true;
}
//
if ( this.renderToScreen === true ) {
renderer.setRenderTarget( null );
this.fsQuad.render( renderer );
} else {
renderer.setRenderTarget( writeBuffer );
if ( this.clear ) renderer.clear( renderer.autoClearColor, renderer.autoClearDepth, renderer.autoClearStencil );
this.fsQuad.render( renderer );
}
}
// Frees the material and fullscreen quad.
dispose() {
this.material.dispose();
this.fsQuad.dispose();
}
}
export { OutputPass };

View File

@ -0,0 +1,85 @@
/**
 * Raw shader used by OutputPass: samples the composed frame, applies the tone
 * mapping operator selected via compile-time defines (set by OutputPass from
 * the renderer's toneMapping), then the optional sRGB transfer. Declares its
 * own attributes/uniforms because it is compiled as a RawShaderMaterial.
 */
const OutputShader = {
name: 'OutputShader',
uniforms: {
'tDiffuse': { value: null },
'toneMappingExposure': { value: 1 }
},
vertexShader: /* glsl */`
precision highp float;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
attribute vec3 position;
attribute vec2 uv;
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
precision highp float;
uniform sampler2D tDiffuse;
#include <tonemapping_pars_fragment>
#include <colorspace_pars_fragment>
varying vec2 vUv;
void main() {
gl_FragColor = texture2D( tDiffuse, vUv );
// tone mapping
#ifdef LINEAR_TONE_MAPPING
gl_FragColor.rgb = LinearToneMapping( gl_FragColor.rgb );
#elif defined( REINHARD_TONE_MAPPING )
gl_FragColor.rgb = ReinhardToneMapping( gl_FragColor.rgb );
#elif defined( CINEON_TONE_MAPPING )
gl_FragColor.rgb = OptimizedCineonToneMapping( gl_FragColor.rgb );
#elif defined( ACES_FILMIC_TONE_MAPPING )
gl_FragColor.rgb = ACESFilmicToneMapping( gl_FragColor.rgb );
#elif defined( AGX_TONE_MAPPING )
gl_FragColor.rgb = AgXToneMapping( gl_FragColor.rgb );
#elif defined( NEUTRAL_TONE_MAPPING )
gl_FragColor.rgb = NeutralToneMapping( gl_FragColor.rgb );
#endif
// color space
#ifdef SRGB_TRANSFER
gl_FragColor = sRGBTransferOETF( gl_FragColor );
#endif
}`
};
export { OutputShader };

View File

@ -0,0 +1,467 @@
import {
BufferGeometry,
Color,
FileLoader,
Float32BufferAttribute,
Int32BufferAttribute,
Loader,
Points,
PointsMaterial
} from '/static/javascript/three/build/three.module.js';
/**
 * Loader for PCD (Point Cloud Data) files, supporting the `ascii`, `binary`
 * and `binary_compressed` (LZF) DATA formats. Produces a THREE.Points object
 * with position and, when present in the file, normal / color / intensity /
 * label attributes.
 *
 * Fixes vs. previous revision:
 * - parseInt() calls now always pass an explicit radix 10 (WIDTH, HEIGHT and
 *   label were parsed without one, inconsistently with POINTS).
 * - The rgb field index/type lookup is hoisted out of the per-line ASCII
 *   parse loop (it is constant for the whole file).
 */
class PCDLoader extends Loader {
constructor( manager ) {
super( manager );
// Multi-byte fields in binary PCD data are read little-endian by default.
this.littleEndian = true;
}
/**
 * Loads and parses a PCD file from a URL.
 *
 * @param {string} url - File URL (resolved against this.path).
 * @param {Function} onLoad - Called with the resulting Points object.
 * @param {Function} [onProgress] - Progress callback.
 * @param {Function} [onError] - Error callback; when omitted, parse errors
 * are logged to the console instead.
 */
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( data ) {
try {
onLoad( scope.parse( data ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
/**
 * Parses PCD file content.
 *
 * @param {ArrayBuffer} data - Raw file bytes.
 * @returns {Points} Point cloud with the attributes found in the file.
 * @throws {Error} On malformed LZF-compressed data.
 */
parse( data ) {
// LZF decompression for binary_compressed files.
// from https://gitlab.com/taketwo/three-pcd-loader/blob/master/decompress-lzf.js
function decompressLZF( inData, outLength ) {
const inLength = inData.length;
const outData = new Uint8Array( outLength );
let inPtr = 0;
let outPtr = 0;
let ctrl;
let len;
let ref;
do {
ctrl = inData[ inPtr ++ ];
if ( ctrl < ( 1 << 5 ) ) {
// Literal run of ctrl+1 bytes.
ctrl ++;
if ( outPtr + ctrl > outLength ) throw new Error( 'Output buffer is not large enough' );
if ( inPtr + ctrl > inLength ) throw new Error( 'Invalid compressed data' );
do {
outData[ outPtr ++ ] = inData[ inPtr ++ ];
} while ( -- ctrl );
} else {
// Back-reference: copy len+2 bytes from earlier in the output.
len = ctrl >> 5;
ref = outPtr - ( ( ctrl & 0x1f ) << 8 ) - 1;
if ( inPtr >= inLength ) throw new Error( 'Invalid compressed data' );
if ( len === 7 ) {
len += inData[ inPtr ++ ];
if ( inPtr >= inLength ) throw new Error( 'Invalid compressed data' );
}
ref -= inData[ inPtr ++ ];
if ( outPtr + len + 2 > outLength ) throw new Error( 'Output buffer is not large enough' );
if ( ref < 0 ) throw new Error( 'Invalid compressed data' );
if ( ref >= outPtr ) throw new Error( 'Invalid compressed data' );
do {
outData[ outPtr ++ ] = outData[ ref ++ ];
} while ( -- len + 2 );
}
} while ( inPtr < inLength );
return outData;
}
// Parses the ASCII header section (always present, whatever the DATA format)
// into an object with fields/size/type/count/offset and the header length.
function parseHeader( data ) {
const PCDheader = {};
const result1 = data.search( /[\r\n]DATA\s(\S*)\s/i );
const result2 = /[\r\n]DATA\s(\S*)\s/i.exec( data.slice( result1 - 1 ) );
PCDheader.data = result2[ 1 ];
PCDheader.headerLen = result2[ 0 ].length + result1;
PCDheader.str = data.slice( 0, PCDheader.headerLen );
// remove comments
PCDheader.str = PCDheader.str.replace( /#.*/gi, '' );
// parse
PCDheader.version = /VERSION (.*)/i.exec( PCDheader.str );
PCDheader.fields = /FIELDS (.*)/i.exec( PCDheader.str );
PCDheader.size = /SIZE (.*)/i.exec( PCDheader.str );
PCDheader.type = /TYPE (.*)/i.exec( PCDheader.str );
PCDheader.count = /COUNT (.*)/i.exec( PCDheader.str );
PCDheader.width = /WIDTH (.*)/i.exec( PCDheader.str );
PCDheader.height = /HEIGHT (.*)/i.exec( PCDheader.str );
PCDheader.viewpoint = /VIEWPOINT (.*)/i.exec( PCDheader.str );
PCDheader.points = /POINTS (.*)/i.exec( PCDheader.str );
// evaluate
if ( PCDheader.version !== null )
PCDheader.version = parseFloat( PCDheader.version[ 1 ] );
PCDheader.fields = ( PCDheader.fields !== null ) ? PCDheader.fields[ 1 ].split( ' ' ) : [];
if ( PCDheader.type !== null )
PCDheader.type = PCDheader.type[ 1 ].split( ' ' );
if ( PCDheader.width !== null )
PCDheader.width = parseInt( PCDheader.width[ 1 ], 10 );
if ( PCDheader.height !== null )
PCDheader.height = parseInt( PCDheader.height[ 1 ], 10 );
if ( PCDheader.viewpoint !== null )
PCDheader.viewpoint = PCDheader.viewpoint[ 1 ];
if ( PCDheader.points !== null )
PCDheader.points = parseInt( PCDheader.points[ 1 ], 10 );
if ( PCDheader.points === null )
PCDheader.points = PCDheader.width * PCDheader.height;
if ( PCDheader.size !== null ) {
PCDheader.size = PCDheader.size[ 1 ].split( ' ' ).map( function ( x ) {
return parseInt( x, 10 );
} );
}
if ( PCDheader.count !== null ) {
PCDheader.count = PCDheader.count[ 1 ].split( ' ' ).map( function ( x ) {
return parseInt( x, 10 );
} );
} else {
// COUNT defaults to 1 per field.
PCDheader.count = [];
for ( let i = 0, l = PCDheader.fields.length; i < l; i ++ ) {
PCDheader.count.push( 1 );
}
}
// Per-field offset: column index for ascii, byte offset within a row for binary.
PCDheader.offset = {};
let sizeSum = 0;
for ( let i = 0, l = PCDheader.fields.length; i < l; i ++ ) {
if ( PCDheader.data === 'ascii' ) {
PCDheader.offset[ PCDheader.fields[ i ] ] = i;
} else {
PCDheader.offset[ PCDheader.fields[ i ] ] = sizeSum;
sizeSum += PCDheader.size[ i ] * PCDheader.count[ i ];
}
}
// for binary only
PCDheader.rowSize = sizeSum;
return PCDheader;
}
const textData = new TextDecoder().decode( data );
// parse header (always ascii format)
const PCDheader = parseHeader( textData );
// parse data
const position = [];
const normal = [];
const color = [];
const intensity = [];
const label = [];
const c = new Color();
// ascii
if ( PCDheader.data === 'ascii' ) {
const offset = PCDheader.offset;
const pcdData = textData.slice( PCDheader.headerLen );
const lines = pcdData.split( '\n' );
// The rgb field's declared TYPE is constant for the whole file — look it
// up once instead of once per line.
let rgb_type;
if ( offset.rgb !== undefined ) {
const rgb_field_index = PCDheader.fields.findIndex( ( field ) => field === 'rgb' );
rgb_type = PCDheader.type[ rgb_field_index ];
}
for ( let i = 0, l = lines.length; i < l; i ++ ) {
if ( lines[ i ] === '' ) continue;
const line = lines[ i ].split( ' ' );
if ( offset.x !== undefined ) {
position.push( parseFloat( line[ offset.x ] ) );
position.push( parseFloat( line[ offset.y ] ) );
position.push( parseFloat( line[ offset.z ] ) );
}
if ( offset.rgb !== undefined ) {
const float = parseFloat( line[ offset.rgb ] );
let rgb = float;
if ( rgb_type === 'F' ) {
// treat float values as int
// https://github.com/daavoo/pyntcloud/pull/204/commits/7b4205e64d5ed09abe708b2e91b615690c24d518
const farr = new Float32Array( 1 );
farr[ 0 ] = float;
rgb = new Int32Array( farr.buffer )[ 0 ];
}
const r = ( ( rgb >> 16 ) & 0x0000ff ) / 255;
const g = ( ( rgb >> 8 ) & 0x0000ff ) / 255;
const b = ( ( rgb >> 0 ) & 0x0000ff ) / 255;
c.set( r, g, b ).convertSRGBToLinear();
color.push( c.r, c.g, c.b );
}
if ( offset.normal_x !== undefined ) {
normal.push( parseFloat( line[ offset.normal_x ] ) );
normal.push( parseFloat( line[ offset.normal_y ] ) );
normal.push( parseFloat( line[ offset.normal_z ] ) );
}
if ( offset.intensity !== undefined ) {
intensity.push( parseFloat( line[ offset.intensity ] ) );
}
if ( offset.label !== undefined ) {
label.push( parseInt( line[ offset.label ], 10 ) );
}
}
}
// binary-compressed
// normally data in PCD files are organized as array of structures: XYZRGBXYZRGB
// binary compressed PCD files organize their data as structure of arrays: XXYYZZRGBRGB
// that requires a totally different parsing approach compared to non-compressed data
if ( PCDheader.data === 'binary_compressed' ) {
const sizes = new Uint32Array( data.slice( PCDheader.headerLen, PCDheader.headerLen + 8 ) );
const compressedSize = sizes[ 0 ];
const decompressedSize = sizes[ 1 ];
const decompressed = decompressLZF( new Uint8Array( data, PCDheader.headerLen + 8, compressedSize ), decompressedSize );
const dataview = new DataView( decompressed.buffer );
const offset = PCDheader.offset;
for ( let i = 0; i < PCDheader.points; i ++ ) {
if ( offset.x !== undefined ) {
const xIndex = PCDheader.fields.indexOf( 'x' );
const yIndex = PCDheader.fields.indexOf( 'y' );
const zIndex = PCDheader.fields.indexOf( 'z' );
position.push( dataview.getFloat32( ( PCDheader.points * offset.x ) + PCDheader.size[ xIndex ] * i, this.littleEndian ) );
position.push( dataview.getFloat32( ( PCDheader.points * offset.y ) + PCDheader.size[ yIndex ] * i, this.littleEndian ) );
position.push( dataview.getFloat32( ( PCDheader.points * offset.z ) + PCDheader.size[ zIndex ] * i, this.littleEndian ) );
}
if ( offset.rgb !== undefined ) {
const rgbIndex = PCDheader.fields.indexOf( 'rgb' );
const r = dataview.getUint8( ( PCDheader.points * offset.rgb ) + PCDheader.size[ rgbIndex ] * i + 2 ) / 255.0;
const g = dataview.getUint8( ( PCDheader.points * offset.rgb ) + PCDheader.size[ rgbIndex ] * i + 1 ) / 255.0;
const b = dataview.getUint8( ( PCDheader.points * offset.rgb ) + PCDheader.size[ rgbIndex ] * i + 0 ) / 255.0;
c.set( r, g, b ).convertSRGBToLinear();
color.push( c.r, c.g, c.b );
}
if ( offset.normal_x !== undefined ) {
const xIndex = PCDheader.fields.indexOf( 'normal_x' );
const yIndex = PCDheader.fields.indexOf( 'normal_y' );
const zIndex = PCDheader.fields.indexOf( 'normal_z' );
normal.push( dataview.getFloat32( ( PCDheader.points * offset.normal_x ) + PCDheader.size[ xIndex ] * i, this.littleEndian ) );
normal.push( dataview.getFloat32( ( PCDheader.points * offset.normal_y ) + PCDheader.size[ yIndex ] * i, this.littleEndian ) );
normal.push( dataview.getFloat32( ( PCDheader.points * offset.normal_z ) + PCDheader.size[ zIndex ] * i, this.littleEndian ) );
}
if ( offset.intensity !== undefined ) {
const intensityIndex = PCDheader.fields.indexOf( 'intensity' );
intensity.push( dataview.getFloat32( ( PCDheader.points * offset.intensity ) + PCDheader.size[ intensityIndex ] * i, this.littleEndian ) );
}
if ( offset.label !== undefined ) {
const labelIndex = PCDheader.fields.indexOf( 'label' );
label.push( dataview.getInt32( ( PCDheader.points * offset.label ) + PCDheader.size[ labelIndex ] * i, this.littleEndian ) );
}
}
}
// binary
if ( PCDheader.data === 'binary' ) {
const dataview = new DataView( data, PCDheader.headerLen );
const offset = PCDheader.offset;
for ( let i = 0, row = 0; i < PCDheader.points; i ++, row += PCDheader.rowSize ) {
if ( offset.x !== undefined ) {
position.push( dataview.getFloat32( row + offset.x, this.littleEndian ) );
position.push( dataview.getFloat32( row + offset.y, this.littleEndian ) );
position.push( dataview.getFloat32( row + offset.z, this.littleEndian ) );
}
if ( offset.rgb !== undefined ) {
const r = dataview.getUint8( row + offset.rgb + 2 ) / 255.0;
const g = dataview.getUint8( row + offset.rgb + 1 ) / 255.0;
const b = dataview.getUint8( row + offset.rgb + 0 ) / 255.0;
c.set( r, g, b ).convertSRGBToLinear();
color.push( c.r, c.g, c.b );
}
if ( offset.normal_x !== undefined ) {
normal.push( dataview.getFloat32( row + offset.normal_x, this.littleEndian ) );
normal.push( dataview.getFloat32( row + offset.normal_y, this.littleEndian ) );
normal.push( dataview.getFloat32( row + offset.normal_z, this.littleEndian ) );
}
if ( offset.intensity !== undefined ) {
intensity.push( dataview.getFloat32( row + offset.intensity, this.littleEndian ) );
}
if ( offset.label !== undefined ) {
label.push( dataview.getInt32( row + offset.label, this.littleEndian ) );
}
}
}
// build geometry
const geometry = new BufferGeometry();
if ( position.length > 0 ) geometry.setAttribute( 'position', new Float32BufferAttribute( position, 3 ) );
if ( normal.length > 0 ) geometry.setAttribute( 'normal', new Float32BufferAttribute( normal, 3 ) );
if ( color.length > 0 ) geometry.setAttribute( 'color', new Float32BufferAttribute( color, 3 ) );
if ( intensity.length > 0 ) geometry.setAttribute( 'intensity', new Float32BufferAttribute( intensity, 1 ) );
if ( label.length > 0 ) geometry.setAttribute( 'label', new Int32BufferAttribute( label, 1 ) );
geometry.computeBoundingSphere();
// build material
const material = new PointsMaterial( { size: 0.005 } );
if ( color.length > 0 ) {
material.vertexColors = true;
}
// build point cloud
return new Points( geometry, material );
}
}
export { PCDLoader };

View File

@ -0,0 +1,232 @@
import {
BufferGeometry,
FileLoader,
Float32BufferAttribute,
Loader,
Color
} from '/static/javascript/three/build/three.module.js';
// Loader for PDB (Protein Data Bank) molecule files.
// parse() returns { geometryAtoms, geometryBonds, json: { atoms } }:
// point positions/colors for atoms, line-segment endpoints for bonds,
// and the raw atom records for callers that need element data.
class PDBLoader extends Loader {
constructor( manager ) {
super( manager );
}
// Standard three.js Loader entry point: fetches `url` as text and hands
// the parsed result to onLoad. Parse errors go to onError when provided
// (otherwise they are logged) and are reported to the LoadingManager.
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( scope.manager );
loader.setPath( scope.path );
loader.setRequestHeader( scope.requestHeader );
loader.setWithCredentials( scope.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
// Based on CanvasMol PDB parser
parse( text ) {
// Strips leading/trailing whitespace (predates String.prototype.trim).
function trim( text ) {
return text.replace( /^\s\s*/, '' ).replace( /\s\s*$/, '' );
}
// 'fe' -> 'Fe': element symbols are stored capitalized in the JSON output.
function capitalize( text ) {
return text.charAt( 0 ).toUpperCase() + text.slice( 1 ).toLowerCase();
}
// Order-independent key for a bond between two atom serial numbers,
// used to deduplicate CONECT records listed from both endpoints.
function hash( s, e ) {
return 's' + Math.min( s, e ) + 'e' + Math.max( s, e );
}
// Reads one bonded-atom serial number from fixed columns
// [start, start+length) of CONECT line `i` and records the bond once.
function parseBond( start, length, satom, i ) {
const eatom = parseInt( lines[ i ].slice( start, start + length ) );
if ( eatom ) {
const h = hash( satom, eatom );
if ( _bhash[ h ] === undefined ) {
// Serial numbers are 1-based in PDB; store 0-based indices.
_bonds.push( [ satom - 1, eatom - 1, 1 ] );
_bhash[ h ] = _bonds.length - 1;
} else {
// doesn't really work as almost all PDBs
// have just normal bonds appearing multiple
// times instead of being double/triple bonds
// bonds[bhash[h]][2] += 1;
}
}
}
// Converts the accumulated atoms/bonds into the two BufferGeometries
// returned by parse().
function buildGeometry() {
const build = {
geometryAtoms: new BufferGeometry(),
geometryBonds: new BufferGeometry(),
json: {
atoms: atoms
}
};
const geometryAtoms = build.geometryAtoms;
const geometryBonds = build.geometryBonds;
const verticesAtoms = [];
const colorsAtoms = [];
const verticesBonds = [];
// atoms
const c = new Color();
for ( let i = 0, l = atoms.length; i < l; i ++ ) {
const atom = atoms[ i ];
const x = atom[ 0 ];
const y = atom[ 1 ];
const z = atom[ 2 ];
verticesAtoms.push( x, y, z );
// atom[3] is the CPK [r,g,b] 0-255 triple looked up below.
const r = atom[ 3 ][ 0 ] / 255;
const g = atom[ 3 ][ 1 ] / 255;
const b = atom[ 3 ][ 2 ] / 255;
c.set( r, g, b ).convertSRGBToLinear();
colorsAtoms.push( c.r, c.g, c.b );
}
// bonds: two vertices (start atom, end atom) per bond, suitable for
// rendering as LineSegments.
for ( let i = 0, l = _bonds.length; i < l; i ++ ) {
const bond = _bonds[ i ];
const start = bond[ 0 ];
const end = bond[ 1 ];
const startAtom = _atomMap[ start ];
const endAtom = _atomMap[ end ];
let x = startAtom[ 0 ];
let y = startAtom[ 1 ];
let z = startAtom[ 2 ];
verticesBonds.push( x, y, z );
x = endAtom[ 0 ];
y = endAtom[ 1 ];
z = endAtom[ 2 ];
verticesBonds.push( x, y, z );
}
// build geometry
geometryAtoms.setAttribute( 'position', new Float32BufferAttribute( verticesAtoms, 3 ) );
geometryAtoms.setAttribute( 'color', new Float32BufferAttribute( colorsAtoms, 3 ) );
geometryBonds.setAttribute( 'position', new Float32BufferAttribute( verticesBonds, 3 ) );
return build;
}
// CPK coloring convention: element symbol -> [r, g, b] in 0-255.
const CPK = { h: [ 255, 255, 255 ], he: [ 217, 255, 255 ], li: [ 204, 128, 255 ], be: [ 194, 255, 0 ], b: [ 255, 181, 181 ], c: [ 144, 144, 144 ], n: [ 48, 80, 248 ], o: [ 255, 13, 13 ], f: [ 144, 224, 80 ], ne: [ 179, 227, 245 ], na: [ 171, 92, 242 ], mg: [ 138, 255, 0 ], al: [ 191, 166, 166 ], si: [ 240, 200, 160 ], p: [ 255, 128, 0 ], s: [ 255, 255, 48 ], cl: [ 31, 240, 31 ], ar: [ 128, 209, 227 ], k: [ 143, 64, 212 ], ca: [ 61, 255, 0 ], sc: [ 230, 230, 230 ], ti: [ 191, 194, 199 ], v: [ 166, 166, 171 ], cr: [ 138, 153, 199 ], mn: [ 156, 122, 199 ], fe: [ 224, 102, 51 ], co: [ 240, 144, 160 ], ni: [ 80, 208, 80 ], cu: [ 200, 128, 51 ], zn: [ 125, 128, 176 ], ga: [ 194, 143, 143 ], ge: [ 102, 143, 143 ], as: [ 189, 128, 227 ], se: [ 255, 161, 0 ], br: [ 166, 41, 41 ], kr: [ 92, 184, 209 ], rb: [ 112, 46, 176 ], sr: [ 0, 255, 0 ], y: [ 148, 255, 255 ], zr: [ 148, 224, 224 ], nb: [ 115, 194, 201 ], mo: [ 84, 181, 181 ], tc: [ 59, 158, 158 ], ru: [ 36, 143, 143 ], rh: [ 10, 125, 140 ], pd: [ 0, 105, 133 ], ag: [ 192, 192, 192 ], cd: [ 255, 217, 143 ], in: [ 166, 117, 115 ], sn: [ 102, 128, 128 ], sb: [ 158, 99, 181 ], te: [ 212, 122, 0 ], i: [ 148, 0, 148 ], xe: [ 66, 158, 176 ], cs: [ 87, 23, 143 ], ba: [ 0, 201, 0 ], la: [ 112, 212, 255 ], ce: [ 255, 255, 199 ], pr: [ 217, 255, 199 ], nd: [ 199, 255, 199 ], pm: [ 163, 255, 199 ], sm: [ 143, 255, 199 ], eu: [ 97, 255, 199 ], gd: [ 69, 255, 199 ], tb: [ 48, 255, 199 ], dy: [ 31, 255, 199 ], ho: [ 0, 255, 156 ], er: [ 0, 230, 117 ], tm: [ 0, 212, 82 ], yb: [ 0, 191, 56 ], lu: [ 0, 171, 36 ], hf: [ 77, 194, 255 ], ta: [ 77, 166, 255 ], w: [ 33, 148, 214 ], re: [ 38, 125, 171 ], os: [ 38, 102, 150 ], ir: [ 23, 84, 135 ], pt: [ 208, 208, 224 ], au: [ 255, 209, 35 ], hg: [ 184, 184, 208 ], tl: [ 166, 84, 77 ], pb: [ 87, 89, 97 ], bi: [ 158, 79, 181 ], po: [ 171, 92, 0 ], at: [ 117, 79, 69 ], rn: [ 66, 130, 150 ], fr: [ 66, 0, 102 ], ra: [ 0, 125, 0 ], ac: [ 112, 171, 250 ], th: [ 0, 186, 255 ], pa: [ 0, 161, 255 ], u: [ 
0, 143, 255 ], np: [ 0, 128, 255 ], pu: [ 0, 107, 255 ], am: [ 84, 92, 242 ], cm: [ 120, 92, 227 ], bk: [ 138, 79, 227 ], cf: [ 161, 54, 212 ], es: [ 179, 31, 212 ], fm: [ 179, 31, 186 ], md: [ 179, 13, 166 ], no: [ 189, 13, 135 ], lr: [ 199, 0, 102 ], rf: [ 204, 0, 89 ], db: [ 209, 0, 79 ], sg: [ 217, 0, 69 ], bh: [ 224, 0, 56 ], hs: [ 230, 0, 46 ], mt: [ 235, 0, 38 ], ds: [ 235, 0, 38 ], rg: [ 235, 0, 38 ], cn: [ 235, 0, 38 ], uut: [ 235, 0, 38 ], uuq: [ 235, 0, 38 ], uup: [ 235, 0, 38 ], uuh: [ 235, 0, 38 ], uus: [ 235, 0, 38 ], uuo: [ 235, 0, 38 ] };
const atoms = [];
const _bonds = [];
const _bhash = {};
const _atomMap = {};
// parse
const lines = text.split( '\n' );
for ( let i = 0, l = lines.length; i < l; i ++ ) {
if ( lines[ i ].slice( 0, 4 ) === 'ATOM' || lines[ i ].slice( 0, 6 ) === 'HETATM' ) {
// Fixed-column coordinate fields — assumes wwPDB ATOM record layout;
// TODO(review): confirm these slice bounds against the format spec.
const x = parseFloat( lines[ i ].slice( 30, 37 ) );
const y = parseFloat( lines[ i ].slice( 38, 45 ) );
const z = parseFloat( lines[ i ].slice( 46, 53 ) );
const index = parseInt( lines[ i ].slice( 6, 11 ) ) - 1;
// Element symbol column; falls back to the atom-name column when empty.
let e = trim( lines[ i ].slice( 76, 78 ) ).toLowerCase();
if ( e === '' ) {
e = trim( lines[ i ].slice( 12, 14 ) ).toLowerCase();
}
const atomData = [ x, y, z, CPK[ e ], capitalize( e ) ];
atoms.push( atomData );
_atomMap[ index ] = atomData;
} else if ( lines[ i ].slice( 0, 6 ) === 'CONECT' ) {
// First serial is the source atom; up to four bonded serials follow
// in 5-character columns.
const satom = parseInt( lines[ i ].slice( 6, 11 ) );
parseBond( 11, 5, satom, i );
parseBond( 16, 5, satom, i );
parseBond( 21, 5, satom, i );
parseBond( 26, 5, satom, i );
}
}
// build and return geometry
return buildGeometry();
}
}
export { PDBLoader };

View File

@ -0,0 +1,771 @@
import {
BufferGeometry,
FileLoader,
Float32BufferAttribute,
Loader,
Color
} from '/static/javascript/three/build/three.module.js';
/**
* Description: A THREE loader for PLY ASCII files (known as the Polygon
* File Format or the Stanford Triangle Format).
*
* Limitations: ASCII decoding assumes file is UTF-8.
*
* Usage:
* const loader = new PLYLoader();
* loader.load('./models/ply/ascii/dolphins.ply', function (geometry) {
*
* scene.add( new THREE.Mesh( geometry ) );
*
* } );
*
* If the PLY file uses non standard property names, they can be mapped while
* loading. For example, the following maps the properties
* “diffuse_(red|green|blue)” in the file to standard color names.
*
* loader.setPropertyNameMapping( {
* diffuse_red: 'red',
* diffuse_green: 'green',
* diffuse_blue: 'blue'
* } );
*
* Custom properties outside of the defaults for position, uv, normal
* and color attributes can be added using the setCustomPropertyNameMapping method.
* For example, the following maps the element properties “custom_property_a”
* and “custom_property_b” to an attribute “customAttribute” with an item size of 2.
* Attribute item sizes are set from the number of element properties in the property array.
*
* loader.setCustomPropertyNameMapping( {
* customAttribute: ['custom_property_a', 'custom_property_b'],
* } );
*
*/
const _color = new Color();
// Loader for PLY (Stanford Triangle Format) files. Handles the ASCII
// format and both binary endiannesses; parse() returns a BufferGeometry.
class PLYLoader extends Loader {
constructor( manager ) {
super( manager );
// File property name -> standard name (see setPropertyNameMapping).
this.propertyNameMapping = {};
// Custom attribute name -> [ element property names ]
// (see setCustomPropertyNameMapping).
this.customPropertyMapping = {};
}
// Standard three.js Loader entry point: fetches `url` as an ArrayBuffer
// and hands the parsed BufferGeometry to onLoad.
load( url, onLoad, onProgress, onError ) {
const scope = this;
const loader = new FileLoader( this.manager );
loader.setPath( this.path );
loader.setResponseType( 'arraybuffer' );
loader.setRequestHeader( this.requestHeader );
loader.setWithCredentials( this.withCredentials );
loader.load( url, function ( text ) {
try {
onLoad( scope.parse( text ) );
} catch ( e ) {
// Route the failure to onError when provided, otherwise log it;
// either way, tell the LoadingManager the item failed.
if ( onError ) {
onError( e );
} else {
console.error( e );
}
scope.manager.itemError( url );
}
}, onProgress, onError );
}
// Replaces the property-name mapping applied while parsing headers.
setPropertyNameMapping( mapping ) {
this.propertyNameMapping = mapping;
}
// Replaces the custom-attribute mapping applied while building buffers.
setCustomPropertyNameMapping( mapping ) {
this.customPropertyMapping = mapping;
}
// Parses PLY content (ArrayBuffer, or string for ASCII-only input)
// into a BufferGeometry.
parse( data ) {
// Parses the text header into { format, version, comments, elements,
// headerLength, objInfo }.
function parseHeader( data, headerLength = 0 ) {
const patternHeader = /^ply([\s\S]*)end_header(\r\n|\r|\n)/;
let headerText = '';
const result = patternHeader.exec( data );
if ( result !== null ) {
headerText = result[ 1 ];
}
const header = {
comments: [],
elements: [],
headerLength: headerLength,
objInfo: ''
};
const lines = headerText.split( /\r\n|\r|\n/ );
let currentElement;
// Builds one property descriptor from the tokens of a 'property'
// line, applying the user-supplied name mapping.
function make_ply_element_property( propertValues, propertyNameMapping ) {
const property = { type: propertValues[ 0 ] };
if ( property.type === 'list' ) {
// 'property list <countType> <itemType> <name>'
property.name = propertValues[ 3 ];
property.countType = propertValues[ 1 ];
property.itemType = propertValues[ 2 ];
} else {
// 'property <type> <name>'
property.name = propertValues[ 1 ];
}
if ( property.name in propertyNameMapping ) {
property.name = propertyNameMapping[ property.name ];
}
return property;
}
for ( let i = 0; i < lines.length; i ++ ) {
let line = lines[ i ];
line = line.trim();
if ( line === '' ) continue;
const lineValues = line.split( /\s+/ );
const lineType = lineValues.shift();
line = lineValues.join( ' ' );
switch ( lineType ) {
case 'format':
header.format = lineValues[ 0 ];
header.version = lineValues[ 1 ];
break;
case 'comment':
header.comments.push( line );
break;
case 'element':
// A new 'element' line closes the previous element definition.
if ( currentElement !== undefined ) {
header.elements.push( currentElement );
}
currentElement = {};
currentElement.name = lineValues[ 0 ];
currentElement.count = parseInt( lineValues[ 1 ] );
currentElement.properties = [];
break;
case 'property':
currentElement.properties.push( make_ply_element_property( lineValues, scope.propertyNameMapping ) );
break;
case 'obj_info':
header.objInfo = line;
break;
default:
console.log( 'unhandled', lineType, lineValues );
}
}
if ( currentElement !== undefined ) {
header.elements.push( currentElement );
}
return header;
}
// Converts one ASCII token according to its declared PLY scalar type.
function parseASCIINumber( n, type ) {
switch ( type ) {
case 'char': case 'uchar': case 'short': case 'ushort': case 'int': case 'uint':
case 'int8': case 'uint8': case 'int16': case 'uint16': case 'int32': case 'uint32':
return parseInt( n );
case 'float': case 'double': case 'float32': case 'float64':
return parseFloat( n );
}
}
// Reads one element's worth of tokens from the stream; returns null
// when the stream runs dry (truncated file).
function parseASCIIElement( properties, tokens ) {
const element = {};
for ( let i = 0; i < properties.length; i ++ ) {
if ( tokens.empty() ) return null;
if ( properties[ i ].type === 'list' ) {
const list = [];
const n = parseASCIINumber( tokens.next(), properties[ i ].countType );
for ( let j = 0; j < n; j ++ ) {
if ( tokens.empty() ) return null;
list.push( parseASCIINumber( tokens.next(), properties[ i ].itemType ) );
}
element[ properties[ i ].name ] = list;
} else {
element[ properties[ i ].name ] = parseASCIINumber( tokens.next(), properties[ i ].type );
}
}
return element;
}
// Allocates the intermediate arrays that handleElement() fills and
// postProcess() converts into geometry attributes.
function createBuffer() {
const buffer = {
indices: [],
vertices: [],
normals: [],
uvs: [],
faceVertexUvs: [],
colors: [],
faceVertexColors: []
};
for ( const customProperty of Object.keys( scope.customPropertyMapping ) ) {
buffer[ customProperty ] = [];
}
return buffer;
}
// Resolves which property names of an element carry position, normal,
// uv and color data, accepting several common aliases per attribute.
function mapElementAttributes( properties ) {
const elementNames = properties.map( property => {
return property.name;
} );
function findAttrName( names ) {
for ( let i = 0, l = names.length; i < l; i ++ ) {
const name = names[ i ];
if ( elementNames.includes( name ) ) return name;
}
return null;
}
return {
attrX: findAttrName( [ 'x', 'px', 'posx' ] ) || 'x',
attrY: findAttrName( [ 'y', 'py', 'posy' ] ) || 'y',
attrZ: findAttrName( [ 'z', 'pz', 'posz' ] ) || 'z',
attrNX: findAttrName( [ 'nx', 'normalx' ] ),
attrNY: findAttrName( [ 'ny', 'normaly' ] ),
attrNZ: findAttrName( [ 'nz', 'normalz' ] ),
attrS: findAttrName( [ 's', 'u', 'texture_u', 'tx' ] ),
attrT: findAttrName( [ 't', 'v', 'texture_v', 'ty' ] ),
attrR: findAttrName( [ 'red', 'diffuse_red', 'r', 'diffuse_r' ] ),
attrG: findAttrName( [ 'green', 'diffuse_green', 'g', 'diffuse_g' ] ),
attrB: findAttrName( [ 'blue', 'diffuse_blue', 'b', 'diffuse_b' ] ),
};
}
// Parses the whole-body token stream of an ASCII-format file.
function parseASCII( data, header ) {
// PLY ascii format specification, as per http://en.wikipedia.org/wiki/PLY_(file_format)
const buffer = createBuffer();
const patternBody = /end_header\s+(\S[\s\S]*\S|\S)\s*$/;
let body, matches;
if ( ( matches = patternBody.exec( data ) ) !== null ) {
body = matches[ 1 ].split( /\s+/ );
} else {
body = [ ];
}
const tokens = new ArrayStream( body );
// Labelled loop so a truncated element can abort all remaining elements.
loop: for ( let i = 0; i < header.elements.length; i ++ ) {
const elementDesc = header.elements[ i ];
const attributeMap = mapElementAttributes( elementDesc.properties );
for ( let j = 0; j < elementDesc.count; j ++ ) {
const element = parseASCIIElement( elementDesc.properties, tokens );
if ( ! element ) break loop;
handleElement( buffer, elementDesc.name, element, attributeMap );
}
}
return postProcess( buffer );
}
// Turns the filled intermediate buffer into the final BufferGeometry.
function postProcess( buffer ) {
let geometry = new BufferGeometry();
// mandatory buffer data
if ( buffer.indices.length > 0 ) {
geometry.setIndex( buffer.indices );
}
geometry.setAttribute( 'position', new Float32BufferAttribute( buffer.vertices, 3 ) );
// optional buffer data
if ( buffer.normals.length > 0 ) {
geometry.setAttribute( 'normal', new Float32BufferAttribute( buffer.normals, 3 ) );
}
if ( buffer.uvs.length > 0 ) {
geometry.setAttribute( 'uv', new Float32BufferAttribute( buffer.uvs, 2 ) );
}
if ( buffer.colors.length > 0 ) {
geometry.setAttribute( 'color', new Float32BufferAttribute( buffer.colors, 3 ) );
}
// Per-face-vertex data cannot share vertices, so de-index first.
if ( buffer.faceVertexUvs.length > 0 || buffer.faceVertexColors.length > 0 ) {
geometry = geometry.toNonIndexed();
if ( buffer.faceVertexUvs.length > 0 ) geometry.setAttribute( 'uv', new Float32BufferAttribute( buffer.faceVertexUvs, 2 ) );
if ( buffer.faceVertexColors.length > 0 ) geometry.setAttribute( 'color', new Float32BufferAttribute( buffer.faceVertexColors, 3 ) );
}
// custom buffer data
for ( const customProperty of Object.keys( scope.customPropertyMapping ) ) {
if ( buffer[ customProperty ].length > 0 ) {
geometry.setAttribute(
customProperty,
new Float32BufferAttribute(
buffer[ customProperty ],
scope.customPropertyMapping[ customProperty ].length
)
);
}
}
geometry.computeBoundingSphere();
return geometry;
}
// Routes one parsed element into the intermediate buffer. `cacheEntry`
// is the attribute-name map produced by mapElementAttributes().
function handleElement( buffer, elementName, element, cacheEntry ) {
if ( elementName === 'vertex' ) {
buffer.vertices.push( element[ cacheEntry.attrX ], element[ cacheEntry.attrY ], element[ cacheEntry.attrZ ] );
if ( cacheEntry.attrNX !== null && cacheEntry.attrNY !== null && cacheEntry.attrNZ !== null ) {
buffer.normals.push( element[ cacheEntry.attrNX ], element[ cacheEntry.attrNY ], element[ cacheEntry.attrNZ ] );
}
if ( cacheEntry.attrS !== null && cacheEntry.attrT !== null ) {
buffer.uvs.push( element[ cacheEntry.attrS ], element[ cacheEntry.attrT ] );
}
if ( cacheEntry.attrR !== null && cacheEntry.attrG !== null && cacheEntry.attrB !== null ) {
// Colors are stored 0-255 in the file; convert to linear-space floats.
_color.setRGB(
element[ cacheEntry.attrR ] / 255.0,
element[ cacheEntry.attrG ] / 255.0,
element[ cacheEntry.attrB ] / 255.0
).convertSRGBToLinear();
buffer.colors.push( _color.r, _color.g, _color.b );
}
for ( const customProperty of Object.keys( scope.customPropertyMapping ) ) {
for ( const elementProperty of scope.customPropertyMapping[ customProperty ] ) {
buffer[ customProperty ].push( element[ elementProperty ] );
}
}
} else if ( elementName === 'face' ) {
const vertex_indices = element.vertex_indices || element.vertex_index; // issue #9338
const texcoord = element.texcoord;
if ( vertex_indices.length === 3 ) {
buffer.indices.push( vertex_indices[ 0 ], vertex_indices[ 1 ], vertex_indices[ 2 ] );
if ( texcoord && texcoord.length === 6 ) {
buffer.faceVertexUvs.push( texcoord[ 0 ], texcoord[ 1 ] );
buffer.faceVertexUvs.push( texcoord[ 2 ], texcoord[ 3 ] );
buffer.faceVertexUvs.push( texcoord[ 4 ], texcoord[ 5 ] );
}
} else if ( vertex_indices.length === 4 ) {
// Split quads into two triangles.
buffer.indices.push( vertex_indices[ 0 ], vertex_indices[ 1 ], vertex_indices[ 3 ] );
buffer.indices.push( vertex_indices[ 1 ], vertex_indices[ 2 ], vertex_indices[ 3 ] );
}
// face colors
if ( cacheEntry.attrR !== null && cacheEntry.attrG !== null && cacheEntry.attrB !== null ) {
_color.setRGB(
element[ cacheEntry.attrR ] / 255.0,
element[ cacheEntry.attrG ] / 255.0,
element[ cacheEntry.attrB ] / 255.0
).convertSRGBToLinear();
// Same color for each of the face's three corners.
buffer.faceVertexColors.push( _color.r, _color.g, _color.b );
buffer.faceVertexColors.push( _color.r, _color.g, _color.b );
buffer.faceVertexColors.push( _color.r, _color.g, _color.b );
}
}
}
// Reads one element starting at byte offset `at`; returns
// [ element, bytesConsumed ].
function binaryReadElement( at, properties ) {
const element = {};
let read = 0;
for ( let i = 0; i < properties.length; i ++ ) {
const property = properties[ i ];
const valueReader = property.valueReader;
if ( property.type === 'list' ) {
const list = [];
const n = property.countReader.read( at + read );
read += property.countReader.size;
for ( let j = 0; j < n; j ++ ) {
list.push( valueReader.read( at + read ) );
read += valueReader.size;
}
element[ property.name ] = list;
} else {
element[ property.name ] = valueReader.read( at + read );
read += valueReader.size;
}
}
return [ element, read ];
}
// Attaches { read, size } readers to each property descriptor so the
// binary path avoids re-dispatching on type per value.
function setPropertyBinaryReaders( properties, body, little_endian ) {
function getBinaryReader( dataview, type, little_endian ) {
switch ( type ) {
// corespondences for non-specific length types here match rply:
case 'int8': case 'char': return { read: ( at ) => {
return dataview.getInt8( at );
}, size: 1 };
case 'uint8': case 'uchar': return { read: ( at ) => {
return dataview.getUint8( at );
}, size: 1 };
case 'int16': case 'short': return { read: ( at ) => {
return dataview.getInt16( at, little_endian );
}, size: 2 };
case 'uint16': case 'ushort': return { read: ( at ) => {
return dataview.getUint16( at, little_endian );
}, size: 2 };
case 'int32': case 'int': return { read: ( at ) => {
return dataview.getInt32( at, little_endian );
}, size: 4 };
case 'uint32': case 'uint': return { read: ( at ) => {
return dataview.getUint32( at, little_endian );
}, size: 4 };
case 'float32': case 'float': return { read: ( at ) => {
return dataview.getFloat32( at, little_endian );
}, size: 4 };
case 'float64': case 'double': return { read: ( at ) => {
return dataview.getFloat64( at, little_endian );
}, size: 8 };
}
}
for ( let i = 0, l = properties.length; i < l; i ++ ) {
const property = properties[ i ];
if ( property.type === 'list' ) {
property.countReader = getBinaryReader( body, property.countType, little_endian );
property.valueReader = getBinaryReader( body, property.itemType, little_endian );
} else {
property.valueReader = getBinaryReader( body, property.type, little_endian );
}
}
}
// Parses the body of a binary_little_endian / binary_big_endian file.
function parseBinary( data, header ) {
const buffer = createBuffer();
const little_endian = ( header.format === 'binary_little_endian' );
const body = new DataView( data, header.headerLength );
let result, loc = 0;
for ( let currentElement = 0; currentElement < header.elements.length; currentElement ++ ) {
const elementDesc = header.elements[ currentElement ];
const properties = elementDesc.properties;
const attributeMap = mapElementAttributes( properties );
setPropertyBinaryReaders( properties, body, little_endian );
for ( let currentElementCount = 0; currentElementCount < elementDesc.count; currentElementCount ++ ) {
result = binaryReadElement( loc, properties );
loc += result[ 1 ];
const element = result[ 0 ];
handleElement( buffer, elementDesc.name, element, attributeMap );
}
}
return postProcess( buffer );
}
// Scans raw bytes up to 'end_header' and returns the header text plus
// the byte length of the header (the binary body starts right after).
function extractHeaderText( bytes ) {
let i = 0;
let cont = true;
let line = '';
const lines = [];
const startLine = new TextDecoder().decode( bytes.subarray( 0, 5 ) );
const hasCRNL = /^ply\r\n/.test( startLine );
do {
const c = String.fromCharCode( bytes[ i ++ ] );
if ( c !== '\n' && c !== '\r' ) {
line += c;
} else {
if ( line === 'end_header' ) cont = false;
if ( line !== '' ) {
lines.push( line );
line = '';
}
}
} while ( cont && i < bytes.length );
// ascii section using \r\n as line endings
if ( hasCRNL === true ) i ++;
return { headerText: lines.join( '\r' ) + '\r', headerLength: i };
}
//
let geometry;
const scope = this;
if ( data instanceof ArrayBuffer ) {
// Header is always text; its declared format decides how to read the body.
const bytes = new Uint8Array( data );
const { headerText, headerLength } = extractHeaderText( bytes );
const header = parseHeader( headerText, headerLength );
if ( header.format === 'ascii' ) {
const text = new TextDecoder().decode( bytes );
geometry = parseASCII( text, header );
} else {
geometry = parseBinary( data, header );
}
} else {
// Plain-string input is assumed to be ASCII PLY.
geometry = parseASCII( data, parseHeader( data ) );
}
return geometry;
}
}
// Forward-only cursor over a pre-tokenized array; the ASCII parser pulls
// one PLY token at a time through next()/empty().
class ArrayStream {
constructor( arr ) {
this.tokens = arr;
this.cursor = 0;
}
// True once every token has been consumed.
empty() {
return this.cursor >= this.tokens.length;
}
// Returns the current token and advances the cursor.
next() {
const value = this.tokens[ this.cursor ];
this.cursor ++;
return value;
}
}
export { PLYLoader };

View File

@ -0,0 +1,251 @@
import {
CompressedTextureLoader,
RGBA_PVRTC_2BPPV1_Format,
RGBA_PVRTC_4BPPV1_Format,
RGB_PVRTC_2BPPV1_Format,
RGB_PVRTC_4BPPV1_Format
} from '/static/javascript/three/build/three.module.js';
/*
* PVR v2 (legacy) parser
* TODO : Add Support for PVR v3 format
* TODO : implement loadMipmaps option
*/
// Loader for PVR compressed textures; dispatches on the container version
// found in the first 13 header words.
class PVRLoader extends CompressedTextureLoader {
constructor( manager ) {
super( manager );
}
// Parses a raw PVR ArrayBuffer into the mipmap structure expected by
// CompressedTextureLoader.
parse( buffer, loadMipmaps ) {
// 13 uint32 words cover both the v2 and v3 header layouts.
const words = new Uint32Array( buffer, 0, 13 );
const pvrDatas = {
buffer: buffer,
header: words,
loadMipmaps: loadMipmaps
};
const PVR3_MAGIC = 0x03525650; // v3 magic lives in word 0
const PVR2_MAGIC = 0x21525650; // v2 tag lives in word 11
if ( words[ 0 ] === PVR3_MAGIC ) return _parseV3( pvrDatas );
if ( words[ 11 ] === PVR2_MAGIC ) return _parseV2( pvrDatas );
console.error( 'THREE.PVRLoader: Unknown PVR format.' );
}
}
// Decodes a PVR v3 header into the fields _extract() consumes.
function _parseV3( pvrDatas ) {
const header = pvrDatas.header;
const metaLen = header[ 12 ];
const pixelFormat = header[ 2 ];
const height = header[ 6 ];
const width = header[ 7 ];
// const numSurfs = header[ 9 ];
const numFaces = header[ 10 ];
const numMipmaps = header[ 11 ];
// v3 pixel-format id -> [ bits per pixel, three.js compressed format ]
const FORMATS = {
0: [ 2, RGB_PVRTC_2BPPV1_Format ],
1: [ 2, RGBA_PVRTC_2BPPV1_Format ],
2: [ 4, RGB_PVRTC_4BPPV1_Format ],
3: [ 4, RGBA_PVRTC_4BPPV1_Format ]
};
let bpp, format;
const entry = FORMATS[ pixelFormat ];
if ( entry !== undefined ) {
bpp = entry[ 0 ];
format = entry[ 1 ];
} else {
console.error( 'THREE.PVRLoader: Unsupported PVR format:', pixelFormat );
}
// Pixel data begins after the 52-byte fixed header plus metadata block.
pvrDatas.dataPtr = 52 + metaLen;
pvrDatas.bpp = bpp;
pvrDatas.format = format;
pvrDatas.width = width;
pvrDatas.height = height;
pvrDatas.numSurfaces = numFaces;
pvrDatas.numMipmaps = numMipmaps;
pvrDatas.isCubemap = ( numFaces === 6 );
return _extract( pvrDatas );
}
// Decodes a legacy PVR v2 header into the fields _extract() consumes.
function _parseV2( pvrDatas ) {
const header = pvrDatas.header;
const headerLength = header[ 0 ];
const height = header[ 1 ];
const width = header[ 2 ];
const numMipmaps = header[ 3 ];
const flags = header[ 4 ];
// Words 5-9 (data length, bpp, RGB bitmasks) and 11 (tag) are unused here.
const bitmaskAlpha = header[ 10 ];
const numSurfs = header[ 12 ];
// The low byte of the flags word selects the pixel format.
const TYPE_MASK = 0xff;
const PVRTC_2 = 24;
const PVRTC_4 = 25;
const formatFlags = flags & TYPE_MASK;
const _hasAlpha = bitmaskAlpha > 0;
let bpp, format;
switch ( formatFlags ) {
case PVRTC_4:
format = _hasAlpha ? RGBA_PVRTC_4BPPV1_Format : RGB_PVRTC_4BPPV1_Format;
bpp = 4;
break;
case PVRTC_2:
format = _hasAlpha ? RGBA_PVRTC_2BPPV1_Format : RGB_PVRTC_2BPPV1_Format;
bpp = 2;
break;
default:
console.error( 'THREE.PVRLoader: Unknown PVR format:', formatFlags );
}
pvrDatas.dataPtr = headerLength;
pvrDatas.bpp = bpp;
pvrDatas.format = format;
pvrDatas.width = width;
pvrDatas.height = height;
pvrDatas.numSurfaces = numSurfs;
// v2 stores mip count excluding the base level.
pvrDatas.numMipmaps = numMipmaps + 1;
// v2 has no explicit cubemap flag; six surfaces is the usual giveaway.
pvrDatas.isCubemap = ( numSurfs === 6 );
return _extract( pvrDatas );
}
// Slices the raw buffer into per-mip, per-surface Uint8Array views and
// returns the texture description consumed by CompressedTextureLoader.
function _extract( pvrDatas ) {
const pvr = {
mipmaps: [],
width: pvrDatas.width,
height: pvrDatas.height,
format: pvrDatas.format,
mipmapCount: pvrDatas.numMipmaps,
isCubemap: pvrDatas.isCubemap
};
const buffer = pvrDatas.buffer;
const bpp = pvrDatas.bpp;
const numSurfs = pvrDatas.numSurfaces;
// PVRTC block footprint: 2bpp data uses 8x4 texel blocks, 4bpp uses 4x4.
const blockWidth = ( bpp === 2 ) ? 8 : 4;
const blockHeight = 4;
const blockSize = ( blockWidth * blockHeight ) * bpp / 8;
// Pre-size so mipmaps can be written surface-major below.
pvr.mipmaps.length = pvrDatas.numMipmaps * numSurfs;
let dataOffset = pvrDatas.dataPtr;
for ( let mipLevel = 0; mipLevel < pvrDatas.numMipmaps; mipLevel ++ ) {
const sWidth = pvrDatas.width >> mipLevel;
const sHeight = pvrDatas.height >> mipLevel;
let widthBlocks = sWidth / blockWidth;
let heightBlocks = sHeight / blockHeight;
// PVRTC always stores at least a 2x2 grid of blocks per level.
if ( widthBlocks < 2 ) widthBlocks = 2;
if ( heightBlocks < 2 ) heightBlocks = 2;
const dataSize = widthBlocks * heightBlocks * blockSize;
for ( let surfIndex = 0; surfIndex < numSurfs; surfIndex ++ ) {
// View (not copy) into the source buffer for this level/surface.
pvr.mipmaps[ surfIndex * pvrDatas.numMipmaps + mipLevel ] = {
data: new Uint8Array( buffer, dataOffset, dataSize ),
width: sWidth,
height: sHeight
};
dataOffset += dataSize;
}
}
return pvr;
}
export { PVRLoader };

View File

@ -0,0 +1,178 @@
/**
* `PackedPhongMaterial` inherited from THREE.MeshPhongMaterial
*
* @param {Object} parameters
*/
import {
MeshPhongMaterial,
ShaderChunk,
ShaderLib,
UniformsUtils,
} from '/static/javascript/three/build/three.module.js';
// MeshPhongMaterial variant whose vertex shader can decode quantized
// ("packed") position, normal and uv attributes. The decode path is
// selected by the caller through the USE_PACKED_NORMAL / USE_PACKED_POSITION
// / USE_PACKED_UV defines (set via this.defines).
class PackedPhongMaterial extends MeshPhongMaterial {
constructor( parameters ) {
super();
this.defines = {};
this.type = 'PackedPhongMaterial';
// Standard Phong uniforms plus the dequantization matrices consumed by
// the packed-position and packed-uv decode branches below.
this.uniforms = UniformsUtils.merge( [
ShaderLib.phong.uniforms,
{
quantizeMatPos: { value: null },
quantizeMatUV: { value: null }
}
] );
// Vertex shader: the stock Phong chunks with decode helpers spliced in.
this.vertexShader = [
'#define PHONG',
'varying vec3 vViewPosition;',
ShaderChunk.common,
ShaderChunk.uv_pars_vertex,
ShaderChunk.displacementmap_pars_vertex,
ShaderChunk.envmap_pars_vertex,
ShaderChunk.color_pars_vertex,
ShaderChunk.fog_pars_vertex,
ShaderChunk.normal_pars_vertex,
ShaderChunk.morphtarget_pars_vertex,
ShaderChunk.skinning_pars_vertex,
ShaderChunk.shadowmap_pars_vertex,
ShaderChunk.logdepthbuf_pars_vertex,
ShaderChunk.clipping_planes_pars_vertex,
// decodeNormal(): mode 0 = spherical angles, mode 1 = octahedron
// encoding, mode 2 = plain [0,1] -> [-1,1] remap.
`#ifdef USE_PACKED_NORMAL
#if USE_PACKED_NORMAL == 0
vec3 decodeNormal(vec3 packedNormal)
{
float x = packedNormal.x * 2.0 - 1.0;
float y = packedNormal.y * 2.0 - 1.0;
vec2 scth = vec2(sin(x * PI), cos(x * PI));
vec2 scphi = vec2(sqrt(1.0 - y * y), y);
return normalize( vec3(scth.y * scphi.x, scth.x * scphi.x, scphi.y) );
}
#endif
#if USE_PACKED_NORMAL == 1
vec3 decodeNormal(vec3 packedNormal)
{
vec3 v = vec3(packedNormal.xy, 1.0 - abs(packedNormal.x) - abs(packedNormal.y));
if (v.z < 0.0)
{
v.xy = (1.0 - abs(v.yx)) * vec2((v.x >= 0.0) ? +1.0 : -1.0, (v.y >= 0.0) ? +1.0 : -1.0);
}
return normalize(v);
}
#endif
#if USE_PACKED_NORMAL == 2
vec3 decodeNormal(vec3 packedNormal)
{
vec3 v = (packedNormal * 2.0) - 1.0;
return normalize(v);
}
#endif
#endif`,
// Packed-position mode 0 dequantizes through quantizeMatPos.
`#ifdef USE_PACKED_POSITION
#if USE_PACKED_POSITION == 0
uniform mat4 quantizeMatPos;
#endif
#endif`,
`#ifdef USE_PACKED_UV
#if USE_PACKED_UV == 1
uniform mat3 quantizeMatUV;
#endif
#endif`,
// decodeUV(): mode 0 = [0,1] -> [-1,1] remap, mode 1 = matrix dequantize.
`#ifdef USE_PACKED_UV
#if USE_PACKED_UV == 0
vec2 decodeUV(vec2 packedUV)
{
vec2 uv = (packedUV * 2.0) - 1.0;
return uv;
}
#endif
#if USE_PACKED_UV == 1
vec2 decodeUV(vec2 packedUV)
{
vec2 uv = ( vec3(packedUV, 1.0) * quantizeMatUV ).xy;
return uv;
}
#endif
#endif`,
'void main() {',
ShaderChunk.uv_vertex,
`#ifdef USE_MAP
#ifdef USE_PACKED_UV
vMapUv = decodeUV(vMapUv);
#endif
#endif`,
ShaderChunk.color_vertex,
ShaderChunk.morphcolor_vertex,
ShaderChunk.beginnormal_vertex,
`#ifdef USE_PACKED_NORMAL
objectNormal = decodeNormal(objectNormal);
#endif
#ifdef USE_TANGENT
vec3 objectTangent = vec3( tangent.xyz );
#endif
`,
ShaderChunk.morphnormal_vertex,
ShaderChunk.skinbase_vertex,
ShaderChunk.skinnormal_vertex,
ShaderChunk.defaultnormal_vertex,
ShaderChunk.normal_vertex,
ShaderChunk.begin_vertex,
`#ifdef USE_PACKED_POSITION
#if USE_PACKED_POSITION == 0
transformed = ( vec4(transformed, 1.0) * quantizeMatPos ).xyz;
#endif
#endif`,
ShaderChunk.morphtarget_vertex,
ShaderChunk.skinning_vertex,
ShaderChunk.displacementmap_vertex,
ShaderChunk.project_vertex,
ShaderChunk.logdepthbuf_vertex,
ShaderChunk.clipping_planes_vertex,
'vViewPosition = - mvPosition.xyz;',
ShaderChunk.worldpos_vertex,
ShaderChunk.envmap_vertex,
ShaderChunk.shadowmap_vertex,
ShaderChunk.fog_vertex,
'}',
].join( '\n' );
// Use the original MeshPhongMaterial's fragmentShader.
this.fragmentShader = ShaderLib.phong.fragmentShader;
this.setValues( parameters );
}
}
export { PackedPhongMaterial };

View File

@ -0,0 +1,226 @@
import {
Matrix4,
Vector2,
Vector3,
} from '/static/javascript/three/build/three.module.js';
/**
* References:
* https://openaccess.thecvf.com/content/WACV2021/papers/Khademi_Self-Supervised_Poisson-Gaussian_Denoising_WACV_2021_paper.pdf
* https://arxiv.org/pdf/2206.01856.pdf
*/
/**
 * Poisson-disk denoise shader.
 *
 * Blends each pixel of tDiffuse with rotated Poisson-disk neighbors, weighting
 * every sample by luminance, depth and normal similarity (edge-stopping
 * weights), so edges are preserved while noise is averaged out.
 * Intended to be run over several passes, advancing the `index` uniform each
 * pass to rotate the sample pattern with a different noise channel.
 */
const PoissonDenoiseShader = {
name: 'PoissonDenoiseShader',
// Compile-time configuration. SAMPLE_VECTORS is GLSL source generated by
// generatePdSamplePointInitializer and must be regenerated whenever SAMPLES changes.
// NORMAL_VECTOR_TYPE selects how tNormal is decoded (see getViewNormal);
// DEPTH_VALUE_SOURCE selects the depth channel (r or a) in tDepth.
defines: {
'SAMPLES': 16,
'SAMPLE_VECTORS': generatePdSamplePointInitializer( 16, 2, 1 ),
'NORMAL_VECTOR_TYPE': 1,
'DEPTH_VALUE_SOURCE': 0,
},
// lumaPhi/depthPhi/normalPhi control edge-stopping sensitivity;
// radius is the sampling footprint in pixels; index selects the noise channel per pass.
uniforms: {
'tDiffuse': { value: null },
'tNormal': { value: null },
'tDepth': { value: null },
'tNoise': { value: null },
'resolution': { value: new Vector2() },
'cameraProjectionMatrixInverse': { value: new Matrix4() },
'lumaPhi': { value: 5. },
'depthPhi': { value: 5. },
'normalPhi': { value: 5. },
'radius': { value: 4. },
'index': { value: 0 }
},
// Pass-through vertex shader: forwards UVs to the fragment stage.
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
varying vec2 vUv;
uniform sampler2D tDiffuse;
uniform sampler2D tNormal;
uniform sampler2D tDepth;
uniform sampler2D tNoise;
uniform vec2 resolution;
uniform mat4 cameraProjectionMatrixInverse;
uniform float lumaPhi;
uniform float depthPhi;
uniform float normalPhi;
uniform float radius;
uniform int index;
#include <common>
#include <packing>
#ifndef SAMPLE_LUMINANCE
#define SAMPLE_LUMINANCE dot(vec3(0.2125, 0.7154, 0.0721), a)
#endif
#ifndef FRAGMENT_OUTPUT
#define FRAGMENT_OUTPUT vec4(denoised, 1.)
#endif
float getLuminance(const in vec3 a) {
return SAMPLE_LUMINANCE;
}
const vec3 poissonDisk[SAMPLES] = SAMPLE_VECTORS;
vec3 getViewPosition(const in vec2 screenPosition, const in float depth) {
vec4 clipSpacePosition = vec4(vec3(screenPosition, depth) * 2.0 - 1.0, 1.0);
vec4 viewSpacePosition = cameraProjectionMatrixInverse * clipSpacePosition;
return viewSpacePosition.xyz / viewSpacePosition.w;
}
float getDepth(const vec2 uv) {
#if DEPTH_VALUE_SOURCE == 1
return textureLod(tDepth, uv.xy, 0.0).a;
#else
return textureLod(tDepth, uv.xy, 0.0).r;
#endif
}
float fetchDepth(const ivec2 uv) {
#if DEPTH_VALUE_SOURCE == 1
return texelFetch(tDepth, uv.xy, 0).a;
#else
return texelFetch(tDepth, uv.xy, 0).r;
#endif
}
vec3 computeNormalFromDepth(const vec2 uv) {
vec2 size = vec2(textureSize(tDepth, 0));
ivec2 p = ivec2(uv * size);
float c0 = fetchDepth(p);
float l2 = fetchDepth(p - ivec2(2, 0));
float l1 = fetchDepth(p - ivec2(1, 0));
float r1 = fetchDepth(p + ivec2(1, 0));
float r2 = fetchDepth(p + ivec2(2, 0));
float b2 = fetchDepth(p - ivec2(0, 2));
float b1 = fetchDepth(p - ivec2(0, 1));
float t1 = fetchDepth(p + ivec2(0, 1));
float t2 = fetchDepth(p + ivec2(0, 2));
float dl = abs((2.0 * l1 - l2) - c0);
float dr = abs((2.0 * r1 - r2) - c0);
float db = abs((2.0 * b1 - b2) - c0);
float dt = abs((2.0 * t1 - t2) - c0);
vec3 ce = getViewPosition(uv, c0).xyz;
vec3 dpdx = (dl < dr) ? ce - getViewPosition((uv - vec2(1.0 / size.x, 0.0)), l1).xyz
: -ce + getViewPosition((uv + vec2(1.0 / size.x, 0.0)), r1).xyz;
vec3 dpdy = (db < dt) ? ce - getViewPosition((uv - vec2(0.0, 1.0 / size.y)), b1).xyz
: -ce + getViewPosition((uv + vec2(0.0, 1.0 / size.y)), t1).xyz;
return normalize(cross(dpdx, dpdy));
}
vec3 getViewNormal(const vec2 uv) {
#if NORMAL_VECTOR_TYPE == 2
return normalize(textureLod(tNormal, uv, 0.).rgb);
#elif NORMAL_VECTOR_TYPE == 1
return unpackRGBToNormal(textureLod(tNormal, uv, 0.).rgb);
#else
return computeNormalFromDepth(uv);
#endif
}
void denoiseSample(in vec3 center, in vec3 viewNormal, in vec3 viewPos, in vec2 sampleUv, inout vec3 denoised, inout float totalWeight) {
vec4 sampleTexel = textureLod(tDiffuse, sampleUv, 0.0);
float sampleDepth = getDepth(sampleUv);
vec3 sampleNormal = getViewNormal(sampleUv);
vec3 neighborColor = sampleTexel.rgb;
vec3 viewPosSample = getViewPosition(sampleUv, sampleDepth);
float normalDiff = dot(viewNormal, sampleNormal);
float normalSimilarity = pow(max(normalDiff, 0.), normalPhi);
float lumaDiff = abs(getLuminance(neighborColor) - getLuminance(center));
float lumaSimilarity = max(1.0 - lumaDiff / lumaPhi, 0.0);
float depthDiff = abs(dot(viewPos - viewPosSample, viewNormal));
float depthSimilarity = max(1. - depthDiff / depthPhi, 0.);
float w = lumaSimilarity * depthSimilarity * normalSimilarity;
denoised += w * neighborColor;
totalWeight += w;
}
void main() {
float depth = getDepth(vUv.xy);
vec3 viewNormal = getViewNormal(vUv);
if (depth == 1. || dot(viewNormal, viewNormal) == 0.) {
discard;
return;
}
vec4 texel = textureLod(tDiffuse, vUv, 0.0);
vec3 center = texel.rgb;
vec3 viewPos = getViewPosition(vUv, depth);
vec2 noiseResolution = vec2(textureSize(tNoise, 0));
vec2 noiseUv = vUv * resolution / noiseResolution;
vec4 noiseTexel = textureLod(tNoise, noiseUv, 0.0);
vec2 noiseVec = vec2(sin(noiseTexel[index % 4] * 2. * PI), cos(noiseTexel[index % 4] * 2. * PI));
mat2 rotationMatrix = mat2(noiseVec.x, -noiseVec.y, noiseVec.x, noiseVec.y);
float totalWeight = 1.0;
vec3 denoised = texel.rgb;
for (int i = 0; i < SAMPLES; i++) {
vec3 sampleDir = poissonDisk[i];
vec2 offset = rotationMatrix * (sampleDir.xy * (1. + sampleDir.z * (radius - 1.)) / resolution);
vec2 sampleUv = vUv + offset;
denoiseSample(center, viewNormal, viewPos, sampleUv, denoised, totalWeight);
}
if (totalWeight > 0.) {
denoised /= totalWeight;
}
gl_FragColor = FRAGMENT_OUTPUT;
}`
};
/**
 * Builds the GLSL initializer source for the `SAMPLE_VECTORS` define: a
 * `vec3[SAMPLES](...)` array literal containing the Poisson-disk sample points.
 *
 * @param {number} samples - Number of sample vectors to emit (must match the SAMPLES define).
 * @param {number} rings - Number of angular turns used to distribute the samples.
 * @param {number} radiusExponent - Exponent shaping the radial falloff of the samples.
 * @returns {string} GLSL array-literal source, e.g. `vec3[SAMPLES](vec3(…), …)`.
 */
function generatePdSamplePointInitializer( samples, rings, radiusExponent ) {
	const poissonDisk = generateDenoiseSamples( samples, rings, radiusExponent );

	// Emit one `vec3(x, y, z)` entry per sample and join them, instead of
	// hand-rolling the separator logic with string concatenation in a loop.
	const entries = poissonDisk.map( ( sample ) => `vec3(${sample.x}, ${sample.y}, ${sample.z})` );

	return `vec3[SAMPLES](${entries.join( ',' )})`;
}
function generateDenoiseSamples( numSamples, numRings, radiusExponent ) {
const samples = [];
for ( let i = 0; i < numSamples; i ++ ) {
const angle = 2 * Math.PI * numRings * i / numSamples;
const radius = Math.pow( i / ( numSamples - 1 ), radiusExponent );
samples.push( new Vector3( Math.cos( angle ), Math.sin( angle ), radius ) );
}
return samples;
}
export { generatePdSamplePointInitializer, PoissonDenoiseShader };

View File

@ -0,0 +1,450 @@
import {
DataTextureLoader,
DataUtils,
FloatType,
HalfFloatType,
LinearFilter,
LinearSRGBColorSpace
} from '/static/javascript/three/build/three.module.js';
// https://github.com/mrdoob/three.js/issues/5552
// http://en.wikipedia.org/wiki/RGBE_image_format
/**
 * Loader for the Radiance RGBE (.hdr) image format.
 *
 * Parses the RGBE text header and the (optionally run-length-encoded) pixel
 * data into a float or half-float RGBA buffer suitable for a data texture.
 * Default output type is HalfFloatType; use setDataType() to select FloatType.
 */
class RGBELoader extends DataTextureLoader {
constructor( manager ) {
super( manager );
// half floats by default; see setDataType()
this.type = HalfFloatType;
}
// adapted from http://www.graphics.cornell.edu/~bjw/rgbe.html
/**
 * Parses a raw RGBE file buffer.
 *
 * @param {ArrayBuffer} buffer - Raw contents of a .hdr file.
 * @returns {{width: number, height: number, data: (Float32Array|Uint16Array), header: string, gamma: number, exposure: number, type: number}} Decoded image and header info.
 * @throws {Error} On read, format or memory errors in the RGBE stream, or an unsupported output type.
 */
parse( buffer ) {
const
/* default error routine. change this to change error handling */
rgbe_read_error = 1,
rgbe_write_error = 2,
rgbe_format_error = 3,
rgbe_memory_error = 4,
rgbe_error = function ( rgbe_error_code, msg ) {
switch ( rgbe_error_code ) {
case rgbe_read_error: throw new Error( 'THREE.RGBELoader: Read Error: ' + ( msg || '' ) );
case rgbe_write_error: throw new Error( 'THREE.RGBELoader: Write Error: ' + ( msg || '' ) );
case rgbe_format_error: throw new Error( 'THREE.RGBELoader: Bad File Format: ' + ( msg || '' ) );
default:
case rgbe_memory_error: throw new Error( 'THREE.RGBELoader: Memory Error: ' + ( msg || '' ) );
}
},
/* offsets to red, green, and blue components in a data (float) pixel */
//RGBE_DATA_RED = 0,
//RGBE_DATA_GREEN = 1,
//RGBE_DATA_BLUE = 2,
/* number of floats per pixel, use 4 since stored in rgba image format */
//RGBE_DATA_SIZE = 4,
/* flags indicating which fields in an rgbe_header_info are valid */
RGBE_VALID_PROGRAMTYPE = 1,
RGBE_VALID_FORMAT = 2,
RGBE_VALID_DIMENSIONS = 4,
NEWLINE = '\n',
// reads one newline-terminated line from the buffer, advancing buffer.pos unless consume === false
fgets = function ( buffer, lineLimit, consume ) {
const chunkSize = 128;
lineLimit = ! lineLimit ? 1024 : lineLimit;
let p = buffer.pos,
i = - 1, len = 0, s = '',
chunk = String.fromCharCode.apply( null, new Uint16Array( buffer.subarray( p, p + chunkSize ) ) );
while ( ( 0 > ( i = chunk.indexOf( NEWLINE ) ) ) && ( len < lineLimit ) && ( p < buffer.byteLength ) ) {
s += chunk; len += chunk.length;
p += chunkSize;
chunk += String.fromCharCode.apply( null, new Uint16Array( buffer.subarray( p, p + chunkSize ) ) );
}
if ( - 1 < i ) {
/*for (i=l-1; i>=0; i--) {
byteCode = m.charCodeAt(i);
if (byteCode > 0x7f && byteCode <= 0x7ff) byteLen++;
else if (byteCode > 0x7ff && byteCode <= 0xffff) byteLen += 2;
if (byteCode >= 0xDC00 && byteCode <= 0xDFFF) i--; //trail surrogate
}*/
if ( false !== consume ) buffer.pos += len + i + 1;
return s + chunk.slice( 0, i );
}
return false;
},
/* minimal header reading. modify if you want to parse more information */
RGBE_ReadHeader = function ( buffer ) {
// regexes to parse header info fields
const magic_token_re = /^#\?(\S+)/,
gamma_re = /^\s*GAMMA\s*=\s*(\d+(\.\d+)?)\s*$/,
exposure_re = /^\s*EXPOSURE\s*=\s*(\d+(\.\d+)?)\s*$/,
format_re = /^\s*FORMAT=(\S+)\s*$/,
dimensions_re = /^\s*\-Y\s+(\d+)\s+\+X\s+(\d+)\s*$/,
// RGBE format header struct
header = {
valid: 0, /* indicate which fields are valid */
string: '', /* the actual header string */
comments: '', /* comments found in header */
programtype: 'RGBE', /* listed at beginning of file to identify it after "#?". defaults to "RGBE" */
format: '', /* RGBE format, default 32-bit_rle_rgbe */
gamma: 1.0, /* image has already been gamma corrected with given gamma. defaults to 1.0 (no correction) */
exposure: 1.0, /* a value of 1.0 in an image corresponds to <exposure> watts/steradian/m^2. defaults to 1.0 */
width: 0, height: 0 /* image dimensions, width/height */
};
let line, match;
if ( buffer.pos >= buffer.byteLength || ! ( line = fgets( buffer ) ) ) {
rgbe_error( rgbe_read_error, 'no header found' );
}
/* if you want to require the magic token then uncomment the next line */
if ( ! ( match = line.match( magic_token_re ) ) ) {
rgbe_error( rgbe_format_error, 'bad initial token' );
}
header.valid |= RGBE_VALID_PROGRAMTYPE;
header.programtype = match[ 1 ];
header.string += line + '\n';
while ( true ) {
line = fgets( buffer );
if ( false === line ) break;
header.string += line + '\n';
if ( '#' === line.charAt( 0 ) ) {
header.comments += line + '\n';
continue; // comment line
}
if ( match = line.match( gamma_re ) ) {
header.gamma = parseFloat( match[ 1 ] );
}
if ( match = line.match( exposure_re ) ) {
header.exposure = parseFloat( match[ 1 ] );
}
if ( match = line.match( format_re ) ) {
header.valid |= RGBE_VALID_FORMAT;
header.format = match[ 1 ];//'32-bit_rle_rgbe';
}
if ( match = line.match( dimensions_re ) ) {
header.valid |= RGBE_VALID_DIMENSIONS;
header.height = parseInt( match[ 1 ], 10 );
header.width = parseInt( match[ 2 ], 10 );
}
if ( ( header.valid & RGBE_VALID_FORMAT ) && ( header.valid & RGBE_VALID_DIMENSIONS ) ) break;
}
if ( ! ( header.valid & RGBE_VALID_FORMAT ) ) {
rgbe_error( rgbe_format_error, 'missing format specifier' );
}
if ( ! ( header.valid & RGBE_VALID_DIMENSIONS ) ) {
rgbe_error( rgbe_format_error, 'missing image size specifier' );
}
return header;
},
// decodes the pixel payload into a flat RGBA byte buffer (one byte per channel, alpha = shared exponent)
RGBE_ReadPixels_RLE = function ( buffer, w, h ) {
const scanline_width = w;
if (
// run length encoding is not allowed so read flat
( ( scanline_width < 8 ) || ( scanline_width > 0x7fff ) ) ||
// this file is not run length encoded
( ( 2 !== buffer[ 0 ] ) || ( 2 !== buffer[ 1 ] ) || ( buffer[ 2 ] & 0x80 ) )
) {
// return the flat buffer
return new Uint8Array( buffer );
}
if ( scanline_width !== ( ( buffer[ 2 ] << 8 ) | buffer[ 3 ] ) ) {
rgbe_error( rgbe_format_error, 'wrong scanline width' );
}
const data_rgba = new Uint8Array( 4 * w * h );
if ( ! data_rgba.length ) {
rgbe_error( rgbe_memory_error, 'unable to allocate buffer space' );
}
let offset = 0, pos = 0;
const ptr_end = 4 * scanline_width;
const rgbeStart = new Uint8Array( 4 );
const scanline_buffer = new Uint8Array( ptr_end );
let num_scanlines = h;
// read in each successive scanline
while ( ( num_scanlines > 0 ) && ( pos < buffer.byteLength ) ) {
if ( pos + 4 > buffer.byteLength ) {
rgbe_error( rgbe_read_error );
}
rgbeStart[ 0 ] = buffer[ pos ++ ];
rgbeStart[ 1 ] = buffer[ pos ++ ];
rgbeStart[ 2 ] = buffer[ pos ++ ];
rgbeStart[ 3 ] = buffer[ pos ++ ];
if ( ( 2 != rgbeStart[ 0 ] ) || ( 2 != rgbeStart[ 1 ] ) || ( ( ( rgbeStart[ 2 ] << 8 ) | rgbeStart[ 3 ] ) != scanline_width ) ) {
rgbe_error( rgbe_format_error, 'bad rgbe scanline format' );
}
// read each of the four channels for the scanline into the buffer
// first red, then green, then blue, then exponent
let ptr = 0, count;
while ( ( ptr < ptr_end ) && ( pos < buffer.byteLength ) ) {
count = buffer[ pos ++ ];
const isEncodedRun = count > 128;
if ( isEncodedRun ) count -= 128;
if ( ( 0 === count ) || ( ptr + count > ptr_end ) ) {
rgbe_error( rgbe_format_error, 'bad scanline data' );
}
if ( isEncodedRun ) {
// a (encoded) run of the same value
const byteValue = buffer[ pos ++ ];
for ( let i = 0; i < count; i ++ ) {
scanline_buffer[ ptr ++ ] = byteValue;
}
//ptr += count;
} else {
// a literal-run
scanline_buffer.set( buffer.subarray( pos, pos + count ), ptr );
ptr += count; pos += count;
}
}
// now convert data from buffer into rgba
// first red, then green, then blue, then exponent (alpha)
const l = scanline_width; //scanline_buffer.byteLength;
for ( let i = 0; i < l; i ++ ) {
let off = 0;
data_rgba[ offset ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 1 ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 2 ] = scanline_buffer[ i + off ];
off += scanline_width; //1;
data_rgba[ offset + 3 ] = scanline_buffer[ i + off ];
offset += 4;
}
num_scanlines --;
}
return data_rgba;
};
// converts one RGBE byte pixel (shared exponent in the 4th byte) to linear float RGBA
const RGBEByteToRGBFloat = function ( sourceArray, sourceOffset, destArray, destOffset ) {
const e = sourceArray[ sourceOffset + 3 ];
const scale = Math.pow( 2.0, e - 128.0 ) / 255.0;
destArray[ destOffset + 0 ] = sourceArray[ sourceOffset + 0 ] * scale;
destArray[ destOffset + 1 ] = sourceArray[ sourceOffset + 1 ] * scale;
destArray[ destOffset + 2 ] = sourceArray[ sourceOffset + 2 ] * scale;
destArray[ destOffset + 3 ] = 1;
};
// converts one RGBE byte pixel to half-float RGBA
const RGBEByteToRGBHalf = function ( sourceArray, sourceOffset, destArray, destOffset ) {
const e = sourceArray[ sourceOffset + 3 ];
const scale = Math.pow( 2.0, e - 128.0 ) / 255.0;
// clamping to 65504, the maximum representable value in float16
destArray[ destOffset + 0 ] = DataUtils.toHalfFloat( Math.min( sourceArray[ sourceOffset + 0 ] * scale, 65504 ) );
destArray[ destOffset + 1 ] = DataUtils.toHalfFloat( Math.min( sourceArray[ sourceOffset + 1 ] * scale, 65504 ) );
destArray[ destOffset + 2 ] = DataUtils.toHalfFloat( Math.min( sourceArray[ sourceOffset + 2 ] * scale, 65504 ) );
destArray[ destOffset + 3 ] = DataUtils.toHalfFloat( 1 );
};
const byteArray = new Uint8Array( buffer );
byteArray.pos = 0; // read cursor shared by fgets/RGBE_ReadHeader
const rgbe_header_info = RGBE_ReadHeader( byteArray );
const w = rgbe_header_info.width,
h = rgbe_header_info.height,
image_rgba_data = RGBE_ReadPixels_RLE( byteArray.subarray( byteArray.pos ), w, h );
let data, type;
let numElements;
switch ( this.type ) {
case FloatType:
numElements = image_rgba_data.length / 4;
const floatArray = new Float32Array( numElements * 4 );
for ( let j = 0; j < numElements; j ++ ) {
RGBEByteToRGBFloat( image_rgba_data, j * 4, floatArray, j * 4 );
}
data = floatArray;
type = FloatType;
break;
case HalfFloatType:
numElements = image_rgba_data.length / 4;
const halfArray = new Uint16Array( numElements * 4 );
for ( let j = 0; j < numElements; j ++ ) {
RGBEByteToRGBHalf( image_rgba_data, j * 4, halfArray, j * 4 );
}
data = halfArray;
type = HalfFloatType;
break;
default:
// fix: removed unreachable `break` that followed this throw
throw new Error( 'THREE.RGBELoader: Unsupported type: ' + this.type );
}
return {
width: w, height: h,
data: data,
header: rgbe_header_info.string,
gamma: rgbe_header_info.gamma,
exposure: rgbe_header_info.exposure,
type: type
};
}
/**
 * Sets the output texture type (FloatType or HalfFloatType).
 *
 * @param {number} value - Desired texture data type constant.
 * @returns {RGBELoader} this, for chaining.
 */
setDataType( value ) {
this.type = value;
return this;
}
/**
 * Loads an .hdr file and configures the resulting texture for linear HDR use
 * (linear-sRGB color space, linear filtering, no mipmaps) before invoking onLoad.
 */
load( url, onLoad, onProgress, onError ) {
function onLoadCallback( texture, texData ) {
switch ( texture.type ) {
case FloatType:
case HalfFloatType:
texture.colorSpace = LinearSRGBColorSpace;
texture.minFilter = LinearFilter;
texture.magFilter = LinearFilter;
texture.generateMipmaps = false;
texture.flipY = true;
break;
}
if ( onLoad ) onLoad( texture, texData );
}
return super.load( url, onLoadCallback, onProgress, onError );
}
}
export { RGBELoader };

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,54 @@
/**
* RGB Shift Shader
* Shifts red and blue channels from center in opposite directions
* Ported from https://web.archive.org/web/20090820185047/http://kriss.cx/tom/2009/05/rgb-shift/
* by Tom Butterworth / https://web.archive.org/web/20090810054752/http://kriss.cx/tom/
*
* amount: shift distance (1 is width of input)
* angle: shift angle in radians
*/
const RGBShiftShader = {
name: 'RGBShiftShader',
// amount: shift distance (1.0 spans the full input width);
// angle: shift direction in radians.
uniforms: {
'tDiffuse': { value: null },
'amount': { value: 0.005 },
'angle': { value: 0.0 }
},
// Pass-through vertex shader: forwards UVs to the fragment stage.
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
// Samples the red and blue channels offset in opposite directions along
// `angle`, keeping green and alpha from the unshifted center sample.
fragmentShader: /* glsl */`
uniform sampler2D tDiffuse;
uniform float amount;
uniform float angle;
varying vec2 vUv;
void main() {
vec2 offset = amount * vec2( cos(angle), sin(angle));
vec4 cr = texture2D(tDiffuse, vUv + offset);
vec4 cga = texture2D(tDiffuse, vUv);
vec4 cb = texture2D(tDiffuse, vUv - offset);
gl_FragColor = vec4(cr.r, cga.g, cb.b, cga.a);
}`
};
export { RGBShiftShader };

View File

@ -0,0 +1,235 @@
import {
WebGLRenderTarget,
MeshNormalMaterial,
ShaderMaterial,
Vector2,
Vector4,
DepthTexture,
NearestFilter,
HalfFloatType
} from '/static/javascript/three/build/three.module.js';
import { Pass, FullScreenQuad } from'/static/javascript/three/examples/jsm/postprocessing/Pass.js';
/**
 * Post-processing pass that renders the scene at a reduced resolution
 * (pixelated look) and overlays pixel-art style depth/normal edge outlines.
 *
 * Edge strengths can be tuned via options.normalEdgeStrength and
 * options.depthEdgeStrength; a value of 0 disables the respective effect
 * (the fragment shader skips the computation when the strength is 0).
 */
class RenderPixelatedPass extends Pass {
constructor( pixelSize, scene, camera, options = {} ) {
super();
this.pixelSize = pixelSize;
this.resolution = new Vector2();
this.renderResolution = new Vector2();
this.pixelatedMaterial = this.createPixelatedMaterial();
this.normalMaterial = new MeshNormalMaterial();
this.fsQuad = new FullScreenQuad( this.pixelatedMaterial );
this.scene = scene;
this.camera = camera;
// fix: use ?? instead of || so an explicit 0 (edge effect disabled) is honored
// rather than silently replaced by the default.
this.normalEdgeStrength = options.normalEdgeStrength ?? 0.3;
this.depthEdgeStrength = options.depthEdgeStrength ?? 0.4;
// low-resolution color + depth target for the pixelated base image
this.beautyRenderTarget = new WebGLRenderTarget();
this.beautyRenderTarget.texture.minFilter = NearestFilter;
this.beautyRenderTarget.texture.magFilter = NearestFilter;
this.beautyRenderTarget.texture.type = HalfFloatType;
this.beautyRenderTarget.depthTexture = new DepthTexture();
// low-resolution normals target used for normal-edge detection
this.normalRenderTarget = new WebGLRenderTarget();
this.normalRenderTarget.texture.minFilter = NearestFilter;
this.normalRenderTarget.texture.magFilter = NearestFilter;
this.normalRenderTarget.texture.type = HalfFloatType;
}
/** Frees the GPU resources owned by this pass. */
dispose() {
this.beautyRenderTarget.dispose();
this.normalRenderTarget.dispose();
this.pixelatedMaterial.dispose();
this.normalMaterial.dispose();
this.fsQuad.dispose();
}
/**
 * Resizes the internal render targets; the low-resolution targets are the
 * full resolution integer-divided by pixelSize.
 */
setSize( width, height ) {
this.resolution.set( width, height );
this.renderResolution.set( ( width / this.pixelSize ) | 0, ( height / this.pixelSize ) | 0 );
const { x, y } = this.renderResolution;
this.beautyRenderTarget.setSize( x, y );
this.normalRenderTarget.setSize( x, y );
this.fsQuad.material.uniforms.resolution.value.set( x, y, 1 / x, 1 / y );
}
/** Changes the pixel size and re-derives the render-target sizes. */
setPixelSize( pixelSize ) {
this.pixelSize = pixelSize;
this.setSize( this.resolution.x, this.resolution.y );
}
render( renderer, writeBuffer ) {
const uniforms = this.fsQuad.material.uniforms;
uniforms.normalEdgeStrength.value = this.normalEdgeStrength;
uniforms.depthEdgeStrength.value = this.depthEdgeStrength;
// 1) render the scene color + depth at low resolution
renderer.setRenderTarget( this.beautyRenderTarget );
renderer.render( this.scene, this.camera );
// 2) render the scene normals, restoring any override material afterwards
const overrideMaterial_old = this.scene.overrideMaterial;
renderer.setRenderTarget( this.normalRenderTarget );
this.scene.overrideMaterial = this.normalMaterial;
renderer.render( this.scene, this.camera );
this.scene.overrideMaterial = overrideMaterial_old;
// 3) composite with edge detection to the output buffer
uniforms.tDiffuse.value = this.beautyRenderTarget.texture;
uniforms.tDepth.value = this.beautyRenderTarget.depthTexture;
uniforms.tNormal.value = this.normalRenderTarget.texture;
if ( this.renderToScreen ) {
renderer.setRenderTarget( null );
} else {
renderer.setRenderTarget( writeBuffer );
if ( this.clear ) renderer.clear();
}
this.fsQuad.render( renderer );
}
/** Builds the edge-detecting upscale material used by the full-screen quad. */
createPixelatedMaterial() {
return new ShaderMaterial( {
uniforms: {
tDiffuse: { value: null },
tDepth: { value: null },
tNormal: { value: null },
resolution: {
value: new Vector4(
this.renderResolution.x,
this.renderResolution.y,
1 / this.renderResolution.x,
1 / this.renderResolution.y,
)
},
normalEdgeStrength: { value: 0 },
depthEdgeStrength: { value: 0 }
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
`,
fragmentShader: /* glsl */`
uniform sampler2D tDiffuse;
uniform sampler2D tDepth;
uniform sampler2D tNormal;
uniform vec4 resolution;
uniform float normalEdgeStrength;
uniform float depthEdgeStrength;
varying vec2 vUv;
float getDepth(int x, int y) {
return texture2D( tDepth, vUv + vec2(x, y) * resolution.zw ).r;
}
vec3 getNormal(int x, int y) {
return texture2D( tNormal, vUv + vec2(x, y) * resolution.zw ).rgb * 2.0 - 1.0;
}
float depthEdgeIndicator(float depth, vec3 normal) {
float diff = 0.0;
diff += clamp(getDepth(1, 0) - depth, 0.0, 1.0);
diff += clamp(getDepth(-1, 0) - depth, 0.0, 1.0);
diff += clamp(getDepth(0, 1) - depth, 0.0, 1.0);
diff += clamp(getDepth(0, -1) - depth, 0.0, 1.0);
return floor(smoothstep(0.01, 0.02, diff) * 2.) / 2.;
}
float neighborNormalEdgeIndicator(int x, int y, float depth, vec3 normal) {
float depthDiff = getDepth(x, y) - depth;
vec3 neighborNormal = getNormal(x, y);
// Edge pixels should yield to faces who's normals are closer to the bias normal.
vec3 normalEdgeBias = vec3(1., 1., 1.); // This should probably be a parameter.
float normalDiff = dot(normal - neighborNormal, normalEdgeBias);
float normalIndicator = clamp(smoothstep(-.01, .01, normalDiff), 0.0, 1.0);
// Only the shallower pixel should detect the normal edge.
float depthIndicator = clamp(sign(depthDiff * .25 + .0025), 0.0, 1.0);
return (1.0 - dot(normal, neighborNormal)) * depthIndicator * normalIndicator;
}
float normalEdgeIndicator(float depth, vec3 normal) {
float indicator = 0.0;
indicator += neighborNormalEdgeIndicator(0, -1, depth, normal);
indicator += neighborNormalEdgeIndicator(0, 1, depth, normal);
indicator += neighborNormalEdgeIndicator(-1, 0, depth, normal);
indicator += neighborNormalEdgeIndicator(1, 0, depth, normal);
return step(0.1, indicator);
}
void main() {
vec4 texel = texture2D( tDiffuse, vUv );
float depth = 0.0;
vec3 normal = vec3(0.0);
if (depthEdgeStrength > 0.0 || normalEdgeStrength > 0.0) {
depth = getDepth(0, 0);
normal = getNormal(0, 0);
}
float dei = 0.0;
if (depthEdgeStrength > 0.0)
dei = depthEdgeIndicator(depth, normal);
float nei = 0.0;
if (normalEdgeStrength > 0.0)
nei = normalEdgeIndicator(depth, normal);
float Strength = dei > 0.0 ? (1.0 - depthEdgeStrength * dei) : (1.0 + normalEdgeStrength * nei);
gl_FragColor = texel * Strength;
}
`
} );
}
}
export { RenderPixelatedPass };

View File

@ -0,0 +1,168 @@
import {
HalfFloatType,
ShaderMaterial,
WebGLRenderTarget
} from '/static/javascript/three/build/three.module.js';
import { FullScreenQuad, Pass } from'/static/javascript/three/examples/jsm/postprocessing/Pass.js';
/**
 * Post-processing pass that cross-fades between two scene/camera pairs.
 *
 * Both scenes are rendered to internal half-float targets each frame and
 * blended by mixRatio (0 = scene A only, 1 = scene B only). Optionally the
 * blend can be driven per-pixel by a grayscale mix texture with a soft
 * threshold (see useTexture / setTexture / setTextureThreshold).
 */
class RenderTransitionPass extends Pass {
constructor( sceneA, cameraA, sceneB, cameraB ) {
super();
this.material = this.createMaterial();
this.fsQuad = new FullScreenQuad( this.material );
this.sceneA = sceneA;
this.cameraA = cameraA;
this.sceneB = sceneB;
this.cameraB = cameraB;
// offscreen targets for the two scene renders
this.renderTargetA = new WebGLRenderTarget();
this.renderTargetA.texture.type = HalfFloatType;
this.renderTargetB = new WebGLRenderTarget();
this.renderTargetB.texture.type = HalfFloatType;
}
// Sets the blend factor in [0, 1]; 0 shows scene A, 1 shows scene B.
setTransition( value ) {
this.material.uniforms.mixRatio.value = value;
}
// Enables (truthy) or disables (falsy) texture-driven blending.
useTexture( value ) {
this.material.uniforms.useTexture.value = value ? 1 : 0;
}
// Sets the grayscale texture that drives the per-pixel transition.
setTexture( value ) {
this.material.uniforms.tMixTexture.value = value;
}
// Sets the softness of the texture-driven transition edge.
setTextureThreshold( value ) {
this.material.uniforms.threshold.value = value;
}
setSize( width, height ) {
this.renderTargetA.setSize( width, height );
this.renderTargetB.setSize( width, height );
}
render( renderer, writeBuffer ) {
// render both scenes to their offscreen targets, then composite
renderer.setRenderTarget( this.renderTargetA );
renderer.render( this.sceneA, this.cameraA );
renderer.setRenderTarget( this.renderTargetB );
renderer.render( this.sceneB, this.cameraB );
const uniforms = this.fsQuad.material.uniforms;
uniforms.tDiffuse1.value = this.renderTargetA.texture;
uniforms.tDiffuse2.value = this.renderTargetB.texture;
if ( this.renderToScreen ) {
renderer.setRenderTarget( null );
renderer.clear();
} else {
renderer.setRenderTarget( writeBuffer );
if ( this.clear ) renderer.clear();
}
this.fsQuad.render( renderer );
}
// Frees the GPU resources owned by this pass.
dispose() {
this.renderTargetA.dispose();
this.renderTargetB.dispose();
this.material.dispose();
this.fsQuad.dispose();
}
// Builds the compositing material that mixes the two rendered textures.
createMaterial() {
return new ShaderMaterial( {
uniforms: {
tDiffuse1: {
value: null
},
tDiffuse2: {
value: null
},
mixRatio: {
value: 0.0
},
threshold: {
value: 0.1
},
useTexture: {
value: 1
},
tMixTexture: {
value: null
}
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = vec2( uv.x, uv.y );
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
`,
fragmentShader: /* glsl */`
uniform float mixRatio;
uniform sampler2D tDiffuse1;
uniform sampler2D tDiffuse2;
uniform sampler2D tMixTexture;
uniform int useTexture;
uniform float threshold;
varying vec2 vUv;
void main() {
vec4 texel1 = texture2D( tDiffuse1, vUv );
vec4 texel2 = texture2D( tDiffuse2, vUv );
if (useTexture == 1) {
vec4 transitionTexel = texture2D( tMixTexture, vUv );
float r = mixRatio * ( 1.0 + threshold * 2.0 ) - threshold;
float mixf = clamp( ( transitionTexel.r - r ) * ( 1.0 / threshold ), 0.0, 1.0 );
gl_FragColor = mix( texel1, texel2, mixf );
} else {
gl_FragColor = mix( texel2, texel1, mixRatio );
}
}
`
} );
}
}
export { RenderTransitionPass };

View File

@ -0,0 +1,334 @@
import {
AddEquation,
Color,
CustomBlending,
DepthTexture,
DstAlphaFactor,
DstColorFactor,
HalfFloatType,
MeshNormalMaterial,
NearestFilter,
NoBlending,
ShaderMaterial,
UniformsUtils,
DepthStencilFormat,
UnsignedInt248Type,
Vector2,
WebGLRenderTarget,
ZeroFactor
} from '/static/javascript/three/build/three.module.js';
import { Pass, FullScreenQuad } from'/static/javascript/three/examples/jsm/postprocessing/Pass.js';
import { SAOShader } from'/static/javascript/three/examples/jsm/shaders/SAOShader.js';
import { DepthLimitedBlurShader } from'/static/javascript/three/examples/jsm/shaders/DepthLimitedBlurShader.js';
import { BlurShaderUtils } from'/static/javascript/three/examples/jsm/shaders/DepthLimitedBlurShader.js';
import { CopyShader } from'/static/javascript/three/examples/jsm/shaders/CopyShader.js';
/**
* SAO implementation inspired from bhouston previous SAO work
*/
class SAOPass extends Pass {
constructor( scene, camera, resolution = new Vector2( 256, 256 ) ) {
super();
this.scene = scene;
this.camera = camera;
this.clear = true;
this.needsSwap = false;
this.originalClearColor = new Color();
this._oldClearColor = new Color();
this.oldClearAlpha = 1;
this.params = {
output: 0,
saoBias: 0.5,
saoIntensity: 0.18,
saoScale: 1,
saoKernelRadius: 100,
saoMinResolution: 0,
saoBlur: true,
saoBlurRadius: 8,
saoBlurStdDev: 4,
saoBlurDepthCutoff: 0.01
};
this.resolution = new Vector2( resolution.x, resolution.y );
this.saoRenderTarget = new WebGLRenderTarget( this.resolution.x, this.resolution.y, { type: HalfFloatType } );
this.blurIntermediateRenderTarget = this.saoRenderTarget.clone();
const depthTexture = new DepthTexture();
depthTexture.format = DepthStencilFormat;
depthTexture.type = UnsignedInt248Type;
this.normalRenderTarget = new WebGLRenderTarget( this.resolution.x, this.resolution.y, {
minFilter: NearestFilter,
magFilter: NearestFilter,
type: HalfFloatType,
depthTexture: depthTexture
} );
this.normalMaterial = new MeshNormalMaterial();
this.normalMaterial.blending = NoBlending;
this.saoMaterial = new ShaderMaterial( {
defines: Object.assign( {}, SAOShader.defines ),
fragmentShader: SAOShader.fragmentShader,
vertexShader: SAOShader.vertexShader,
uniforms: UniformsUtils.clone( SAOShader.uniforms )
} );
this.saoMaterial.defines[ 'PERSPECTIVE_CAMERA' ] = this.camera.isPerspectiveCamera ? 1 : 0;
this.saoMaterial.uniforms[ 'tDepth' ].value = depthTexture;
this.saoMaterial.uniforms[ 'tNormal' ].value = this.normalRenderTarget.texture;
this.saoMaterial.uniforms[ 'size' ].value.set( this.resolution.x, this.resolution.y );
this.saoMaterial.uniforms[ 'cameraInverseProjectionMatrix' ].value.copy( this.camera.projectionMatrixInverse );
this.saoMaterial.uniforms[ 'cameraProjectionMatrix' ].value = this.camera.projectionMatrix;
this.saoMaterial.blending = NoBlending;
this.vBlurMaterial = new ShaderMaterial( {
uniforms: UniformsUtils.clone( DepthLimitedBlurShader.uniforms ),
defines: Object.assign( {}, DepthLimitedBlurShader.defines ),
vertexShader: DepthLimitedBlurShader.vertexShader,
fragmentShader: DepthLimitedBlurShader.fragmentShader
} );
this.vBlurMaterial.defines[ 'DEPTH_PACKING' ] = 0;
this.vBlurMaterial.defines[ 'PERSPECTIVE_CAMERA' ] = this.camera.isPerspectiveCamera ? 1 : 0;
this.vBlurMaterial.uniforms[ 'tDiffuse' ].value = this.saoRenderTarget.texture;
this.vBlurMaterial.uniforms[ 'tDepth' ].value = depthTexture;
this.vBlurMaterial.uniforms[ 'size' ].value.set( this.resolution.x, this.resolution.y );
this.vBlurMaterial.blending = NoBlending;
this.hBlurMaterial = new ShaderMaterial( {
uniforms: UniformsUtils.clone( DepthLimitedBlurShader.uniforms ),
defines: Object.assign( {}, DepthLimitedBlurShader.defines ),
vertexShader: DepthLimitedBlurShader.vertexShader,
fragmentShader: DepthLimitedBlurShader.fragmentShader
} );
this.hBlurMaterial.defines[ 'DEPTH_PACKING' ] = 0;
this.hBlurMaterial.defines[ 'PERSPECTIVE_CAMERA' ] = this.camera.isPerspectiveCamera ? 1 : 0;
this.hBlurMaterial.uniforms[ 'tDiffuse' ].value = this.blurIntermediateRenderTarget.texture;
this.hBlurMaterial.uniforms[ 'tDepth' ].value = depthTexture;
this.hBlurMaterial.uniforms[ 'size' ].value.set( this.resolution.x, this.resolution.y );
this.hBlurMaterial.blending = NoBlending;
this.materialCopy = new ShaderMaterial( {
uniforms: UniformsUtils.clone( CopyShader.uniforms ),
vertexShader: CopyShader.vertexShader,
fragmentShader: CopyShader.fragmentShader,
blending: NoBlending
} );
this.materialCopy.transparent = true;
this.materialCopy.depthTest = false;
this.materialCopy.depthWrite = false;
this.materialCopy.blending = CustomBlending;
this.materialCopy.blendSrc = DstColorFactor;
this.materialCopy.blendDst = ZeroFactor;
this.materialCopy.blendEquation = AddEquation;
this.materialCopy.blendSrcAlpha = DstAlphaFactor;
this.materialCopy.blendDstAlpha = ZeroFactor;
this.materialCopy.blendEquationAlpha = AddEquation;
this.fsQuad = new FullScreenQuad( null );
}
render( renderer, writeBuffer, readBuffer/*, deltaTime, maskActive*/ ) {
// Rendering readBuffer first when rendering to screen
if ( this.renderToScreen ) {
this.materialCopy.blending = NoBlending;
this.materialCopy.uniforms[ 'tDiffuse' ].value = readBuffer.texture;
this.materialCopy.needsUpdate = true;
this.renderPass( renderer, this.materialCopy, null );
}
renderer.getClearColor( this._oldClearColor );
this.oldClearAlpha = renderer.getClearAlpha();
const oldAutoClear = renderer.autoClear;
renderer.autoClear = false;
this.saoMaterial.uniforms[ 'bias' ].value = this.params.saoBias;
this.saoMaterial.uniforms[ 'intensity' ].value = this.params.saoIntensity;
this.saoMaterial.uniforms[ 'scale' ].value = this.params.saoScale;
this.saoMaterial.uniforms[ 'kernelRadius' ].value = this.params.saoKernelRadius;
this.saoMaterial.uniforms[ 'minResolution' ].value = this.params.saoMinResolution;
this.saoMaterial.uniforms[ 'cameraNear' ].value = this.camera.near;
this.saoMaterial.uniforms[ 'cameraFar' ].value = this.camera.far;
// this.saoMaterial.uniforms['randomSeed'].value = Math.random();
const depthCutoff = this.params.saoBlurDepthCutoff * ( this.camera.far - this.camera.near );
this.vBlurMaterial.uniforms[ 'depthCutoff' ].value = depthCutoff;
this.hBlurMaterial.uniforms[ 'depthCutoff' ].value = depthCutoff;
this.vBlurMaterial.uniforms[ 'cameraNear' ].value = this.camera.near;
this.vBlurMaterial.uniforms[ 'cameraFar' ].value = this.camera.far;
this.hBlurMaterial.uniforms[ 'cameraNear' ].value = this.camera.near;
this.hBlurMaterial.uniforms[ 'cameraFar' ].value = this.camera.far;
this.params.saoBlurRadius = Math.floor( this.params.saoBlurRadius );
if ( ( this.prevStdDev !== this.params.saoBlurStdDev ) || ( this.prevNumSamples !== this.params.saoBlurRadius ) ) {
BlurShaderUtils.configure( this.vBlurMaterial, this.params.saoBlurRadius, this.params.saoBlurStdDev, new Vector2( 0, 1 ) );
BlurShaderUtils.configure( this.hBlurMaterial, this.params.saoBlurRadius, this.params.saoBlurStdDev, new Vector2( 1, 0 ) );
this.prevStdDev = this.params.saoBlurStdDev;
this.prevNumSamples = this.params.saoBlurRadius;
}
// render normal and depth
this.renderOverride( renderer, this.normalMaterial, this.normalRenderTarget, 0x7777ff, 1.0 );
// Rendering SAO texture
this.renderPass( renderer, this.saoMaterial, this.saoRenderTarget, 0xffffff, 1.0 );
// Blurring SAO texture
if ( this.params.saoBlur ) {
this.renderPass( renderer, this.vBlurMaterial, this.blurIntermediateRenderTarget, 0xffffff, 1.0 );
this.renderPass( renderer, this.hBlurMaterial, this.saoRenderTarget, 0xffffff, 1.0 );
}
const outputMaterial = this.materialCopy;
// Setting up SAO rendering
if ( this.params.output === SAOPass.OUTPUT.Normal ) {
this.materialCopy.uniforms[ 'tDiffuse' ].value = this.normalRenderTarget.texture;
this.materialCopy.needsUpdate = true;
} else {
this.materialCopy.uniforms[ 'tDiffuse' ].value = this.saoRenderTarget.texture;
this.materialCopy.needsUpdate = true;
}
// Blending depends on output
if ( this.params.output === SAOPass.OUTPUT.Default ) {
outputMaterial.blending = CustomBlending;
} else {
outputMaterial.blending = NoBlending;
}
// Rendering SAOPass result on top of previous pass
this.renderPass( renderer, outputMaterial, this.renderToScreen ? null : readBuffer );
renderer.setClearColor( this._oldClearColor, this.oldClearAlpha );
renderer.autoClear = oldAutoClear;
}
renderPass( renderer, passMaterial, renderTarget, clearColor, clearAlpha ) {
// save original state
renderer.getClearColor( this.originalClearColor );
const originalClearAlpha = renderer.getClearAlpha();
const originalAutoClear = renderer.autoClear;
renderer.setRenderTarget( renderTarget );
// setup pass state
renderer.autoClear = false;
if ( ( clearColor !== undefined ) && ( clearColor !== null ) ) {
renderer.setClearColor( clearColor );
renderer.setClearAlpha( clearAlpha || 0.0 );
renderer.clear();
}
this.fsQuad.material = passMaterial;
this.fsQuad.render( renderer );
// restore original state
renderer.autoClear = originalAutoClear;
renderer.setClearColor( this.originalClearColor );
renderer.setClearAlpha( originalClearAlpha );
}
renderOverride( renderer, overrideMaterial, renderTarget, clearColor, clearAlpha ) {
renderer.getClearColor( this.originalClearColor );
const originalClearAlpha = renderer.getClearAlpha();
const originalAutoClear = renderer.autoClear;
renderer.setRenderTarget( renderTarget );
renderer.autoClear = false;
clearColor = overrideMaterial.clearColor || clearColor;
clearAlpha = overrideMaterial.clearAlpha || clearAlpha;
if ( ( clearColor !== undefined ) && ( clearColor !== null ) ) {
renderer.setClearColor( clearColor );
renderer.setClearAlpha( clearAlpha || 0.0 );
renderer.clear();
}
this.scene.overrideMaterial = overrideMaterial;
renderer.render( this.scene, this.camera );
this.scene.overrideMaterial = null;
// restore original state
renderer.autoClear = originalAutoClear;
renderer.setClearColor( this.originalClearColor );
renderer.setClearAlpha( originalClearAlpha );
}
setSize( width, height ) {
this.saoRenderTarget.setSize( width, height );
this.blurIntermediateRenderTarget.setSize( width, height );
this.normalRenderTarget.setSize( width, height );
this.saoMaterial.uniforms[ 'size' ].value.set( width, height );
this.saoMaterial.uniforms[ 'cameraInverseProjectionMatrix' ].value.copy( this.camera.projectionMatrixInverse );
this.saoMaterial.uniforms[ 'cameraProjectionMatrix' ].value = this.camera.projectionMatrix;
this.saoMaterial.needsUpdate = true;
this.vBlurMaterial.uniforms[ 'size' ].value.set( width, height );
this.vBlurMaterial.needsUpdate = true;
this.hBlurMaterial.uniforms[ 'size' ].value.set( width, height );
this.hBlurMaterial.needsUpdate = true;
}
dispose() {
this.saoRenderTarget.dispose();
this.blurIntermediateRenderTarget.dispose();
this.normalRenderTarget.dispose();
this.normalMaterial.dispose();
this.saoMaterial.dispose();
this.vBlurMaterial.dispose();
this.hBlurMaterial.dispose();
this.materialCopy.dispose();
this.fsQuad.dispose();
}
}
// Debug/output modes selectable via `params.output`:
// Default blends the occlusion over the scene, SAO shows the raw
// ambient-occlusion buffer, Normal shows the normal render target.
SAOPass.OUTPUT = {
'Default': 0,
'SAO': 1,
'Normal': 2
};
export { SAOPass };

View File

@ -0,0 +1,179 @@
import {
Matrix4,
Vector2
} from '/static/javascript/three/build/three.module.js';
/**
 * Scalable Ambient Occlusion (SAO) shader, used by SAOPass.
 *
 * Reconstructs view-space positions from the depth buffer, samples the
 * neighbourhood of each pixel along a spiral pattern and accumulates an
 * occlusion estimate that darkens the output color.
 */
const SAOShader = {
name: 'SAOShader',
defines: {
'NUM_SAMPLES': 7, // number of spiral samples taken per pixel
'NUM_RINGS': 4, // number of spiral turns the samples cover
'DIFFUSE_TEXTURE': 0, // 1 = modulate tDiffuse, 0 = output occlusion on white
'PERSPECTIVE_CAMERA': 1 // selects the depth -> viewZ conversion in getViewZ()
},
uniforms: {
'tDepth': { value: null }, // scene depth texture
'tDiffuse': { value: null }, // scene color, only read when DIFFUSE_TEXTURE == 1
'tNormal': { value: null }, // packed view-space normals
'size': { value: new Vector2( 512, 512 ) }, // render target size in pixels
'cameraNear': { value: 1 },
'cameraFar': { value: 100 },
'cameraProjectionMatrix': { value: new Matrix4() },
'cameraInverseProjectionMatrix': { value: new Matrix4() },
'scale': { value: 1.0 }, // occlusion falloff scale (divided by cameraFar in the shader)
'intensity': { value: 0.1 }, // overall occlusion strength multiplier
'bias': { value: 0.5 }, // subtracted from the occlusion term in getOcclusion()
'minResolution': { value: 0.0 }, // multiplied by cameraFar and subtracted in getOcclusion()
'kernelRadius': { value: 100.0 }, // sampling radius in pixels (divided by size)
'randomSeed': { value: 0.0 } // offsets the per-pixel spiral start angle
},
vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
#include <common>
varying vec2 vUv;
#if DIFFUSE_TEXTURE == 1
uniform sampler2D tDiffuse;
#endif
uniform highp sampler2D tDepth;
uniform highp sampler2D tNormal;
uniform float cameraNear;
uniform float cameraFar;
uniform mat4 cameraProjectionMatrix;
uniform mat4 cameraInverseProjectionMatrix;
uniform float scale;
uniform float intensity;
uniform float bias;
uniform float kernelRadius;
uniform float minResolution;
uniform vec2 size;
uniform float randomSeed;
// RGBA depth
#include <packing>
vec4 getDefaultColor( const in vec2 screenPosition ) {
#if DIFFUSE_TEXTURE == 1
return texture2D( tDiffuse, vUv );
#else
return vec4( 1.0 );
#endif
}
float getDepth( const in vec2 screenPosition ) {
return texture2D( tDepth, screenPosition ).x;
}
float getViewZ( const in float depth ) {
#if PERSPECTIVE_CAMERA == 1
return perspectiveDepthToViewZ( depth, cameraNear, cameraFar );
#else
return orthographicDepthToViewZ( depth, cameraNear, cameraFar );
#endif
}
vec3 getViewPosition( const in vec2 screenPosition, const in float depth, const in float viewZ ) {
float clipW = cameraProjectionMatrix[2][3] * viewZ + cameraProjectionMatrix[3][3];
vec4 clipPosition = vec4( ( vec3( screenPosition, depth ) - 0.5 ) * 2.0, 1.0 );
clipPosition *= clipW; // unprojection.
return ( cameraInverseProjectionMatrix * clipPosition ).xyz;
}
vec3 getViewNormal( const in vec3 viewPosition, const in vec2 screenPosition ) {
return unpackRGBToNormal( texture2D( tNormal, screenPosition ).xyz );
}
float scaleDividedByCameraFar;
float minResolutionMultipliedByCameraFar;
float getOcclusion( const in vec3 centerViewPosition, const in vec3 centerViewNormal, const in vec3 sampleViewPosition ) {
vec3 viewDelta = sampleViewPosition - centerViewPosition;
float viewDistance = length( viewDelta );
float scaledScreenDistance = scaleDividedByCameraFar * viewDistance;
return max(0.0, (dot(centerViewNormal, viewDelta) - minResolutionMultipliedByCameraFar) / scaledScreenDistance - bias) / (1.0 + pow2( scaledScreenDistance ) );
}
// moving costly divides into consts
const float ANGLE_STEP = PI2 * float( NUM_RINGS ) / float( NUM_SAMPLES );
const float INV_NUM_SAMPLES = 1.0 / float( NUM_SAMPLES );
float getAmbientOcclusion( const in vec3 centerViewPosition ) {
// precompute some variables require in getOcclusion.
scaleDividedByCameraFar = scale / cameraFar;
minResolutionMultipliedByCameraFar = minResolution * cameraFar;
vec3 centerViewNormal = getViewNormal( centerViewPosition, vUv );
// jsfiddle that shows sample pattern: https://jsfiddle.net/a16ff1p7/
float angle = rand( vUv + randomSeed ) * PI2;
vec2 radius = vec2( kernelRadius * INV_NUM_SAMPLES ) / size;
vec2 radiusStep = radius;
float occlusionSum = 0.0;
float weightSum = 0.0;
for( int i = 0; i < NUM_SAMPLES; i ++ ) {
vec2 sampleUv = vUv + vec2( cos( angle ), sin( angle ) ) * radius;
radius += radiusStep;
angle += ANGLE_STEP;
float sampleDepth = getDepth( sampleUv );
if( sampleDepth >= ( 1.0 - EPSILON ) ) {
continue;
}
float sampleViewZ = getViewZ( sampleDepth );
vec3 sampleViewPosition = getViewPosition( sampleUv, sampleDepth, sampleViewZ );
occlusionSum += getOcclusion( centerViewPosition, centerViewNormal, sampleViewPosition );
weightSum += 1.0;
}
if( weightSum == 0.0 ) discard;
return occlusionSum * ( intensity / weightSum );
}
void main() {
float centerDepth = getDepth( vUv );
if( centerDepth >= ( 1.0 - EPSILON ) ) {
discard;
}
float centerViewZ = getViewZ( centerDepth );
vec3 viewPosition = getViewPosition( vUv, centerDepth, centerViewZ );
float ambientOcclusion = getAmbientOcclusion( viewPosition );
gl_FragColor = getDefaultColor( vUv );
gl_FragColor.xyz *= 1.0 - ambientOcclusion;
}`
};
export { SAOShader };

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,466 @@
import {
Vector2
} from '/static/javascript/three/build/three.module.js';
/**
* WebGL port of Subpixel Morphological Antialiasing (SMAA) v2.8
* Preset: SMAA 1x Medium (with color edge detection)
* https://github.com/iryoku/smaa/releases/tag/v2.8
*/
// SMAA pass 1 of 3: color edge detection. Writes horizontal/vertical edge
// flags into the red/green channels for use by SMAAWeightsShader.
const SMAAEdgesShader = {
name: 'SMAAEdgesShader',
defines: {
'SMAA_THRESHOLD': '0.1' // minimum per-channel color delta that counts as an edge
},
uniforms: {
'tDiffuse': { value: null }, // input color buffer
'resolution': { value: new Vector2( 1 / 1024, 1 / 512 ) } // 1 / render size in pixels
},
vertexShader: /* glsl */`
uniform vec2 resolution;
varying vec2 vUv;
varying vec4 vOffset[ 3 ];
void SMAAEdgeDetectionVS( vec2 texcoord ) {
vOffset[ 0 ] = texcoord.xyxy + resolution.xyxy * vec4( -1.0, 0.0, 0.0, 1.0 ); // WebGL port note: Changed sign in W component
vOffset[ 1 ] = texcoord.xyxy + resolution.xyxy * vec4( 1.0, 0.0, 0.0, -1.0 ); // WebGL port note: Changed sign in W component
vOffset[ 2 ] = texcoord.xyxy + resolution.xyxy * vec4( -2.0, 0.0, 0.0, 2.0 ); // WebGL port note: Changed sign in W component
}
void main() {
vUv = uv;
SMAAEdgeDetectionVS( vUv );
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform sampler2D tDiffuse;
varying vec2 vUv;
varying vec4 vOffset[ 3 ];
vec4 SMAAColorEdgeDetectionPS( vec2 texcoord, vec4 offset[3], sampler2D colorTex ) {
vec2 threshold = vec2( SMAA_THRESHOLD, SMAA_THRESHOLD );
// Calculate color deltas:
vec4 delta;
vec3 C = texture2D( colorTex, texcoord ).rgb;
vec3 Cleft = texture2D( colorTex, offset[0].xy ).rgb;
vec3 t = abs( C - Cleft );
delta.x = max( max( t.r, t.g ), t.b );
vec3 Ctop = texture2D( colorTex, offset[0].zw ).rgb;
t = abs( C - Ctop );
delta.y = max( max( t.r, t.g ), t.b );
// We do the usual threshold:
vec2 edges = step( threshold, delta.xy );
// Then discard if there is no edge:
if ( dot( edges, vec2( 1.0, 1.0 ) ) == 0.0 )
discard;
// Calculate right and bottom deltas:
vec3 Cright = texture2D( colorTex, offset[1].xy ).rgb;
t = abs( C - Cright );
delta.z = max( max( t.r, t.g ), t.b );
vec3 Cbottom = texture2D( colorTex, offset[1].zw ).rgb;
t = abs( C - Cbottom );
delta.w = max( max( t.r, t.g ), t.b );
// Calculate the maximum delta in the direct neighborhood:
float maxDelta = max( max( max( delta.x, delta.y ), delta.z ), delta.w );
// Calculate left-left and top-top deltas:
vec3 Cleftleft = texture2D( colorTex, offset[2].xy ).rgb;
t = abs( C - Cleftleft );
delta.z = max( max( t.r, t.g ), t.b );
vec3 Ctoptop = texture2D( colorTex, offset[2].zw ).rgb;
t = abs( C - Ctoptop );
delta.w = max( max( t.r, t.g ), t.b );
// Calculate the final maximum delta:
maxDelta = max( max( maxDelta, delta.z ), delta.w );
// Local contrast adaptation in action:
edges.xy *= step( 0.5 * maxDelta, delta.xy );
return vec4( edges, 0.0, 0.0 );
}
void main() {
gl_FragColor = SMAAColorEdgeDetectionPS( vUv, vOffset, tDiffuse );
}`
};
// SMAA pass 2 of 3: blending weight calculation. Walks along the edges
// detected by SMAAEdgesShader and looks up blending weights in the
// precomputed area (tArea) and search (tSearch) lookup textures.
const SMAAWeightsShader = {
name: 'SMAAWeightsShader',
defines: {
'SMAA_MAX_SEARCH_STEPS': '8', // loop bound for the edge-walk searches
'SMAA_AREATEX_MAX_DISTANCE': '16',
'SMAA_AREATEX_PIXEL_SIZE': '( 1.0 / vec2( 160.0, 560.0 ) )',
'SMAA_AREATEX_SUBTEX_SIZE': '( 1.0 / 7.0 )'
},
uniforms: {
'tDiffuse': { value: null }, // edges texture produced by SMAAEdgesShader
'tArea': { value: null }, // precomputed area lookup texture
'tSearch': { value: null }, // precomputed search lookup texture
'resolution': { value: new Vector2( 1 / 1024, 1 / 512 ) } // 1 / render size in pixels
},
vertexShader: /* glsl */`
uniform vec2 resolution;
varying vec2 vUv;
varying vec4 vOffset[ 3 ];
varying vec2 vPixcoord;
void SMAABlendingWeightCalculationVS( vec2 texcoord ) {
vPixcoord = texcoord / resolution;
// We will use these offsets for the searches later on (see @PSEUDO_GATHER4):
vOffset[ 0 ] = texcoord.xyxy + resolution.xyxy * vec4( -0.25, 0.125, 1.25, 0.125 ); // WebGL port note: Changed sign in Y and W components
vOffset[ 1 ] = texcoord.xyxy + resolution.xyxy * vec4( -0.125, 0.25, -0.125, -1.25 ); // WebGL port note: Changed sign in Y and W components
// And these for the searches, they indicate the ends of the loops:
vOffset[ 2 ] = vec4( vOffset[ 0 ].xz, vOffset[ 1 ].yw ) + vec4( -2.0, 2.0, -2.0, 2.0 ) * resolution.xxyy * float( SMAA_MAX_SEARCH_STEPS );
}
void main() {
vUv = uv;
SMAABlendingWeightCalculationVS( vUv );
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
#define SMAASampleLevelZeroOffset( tex, coord, offset ) texture2D( tex, coord + float( offset ) * resolution, 0.0 )
uniform sampler2D tDiffuse;
uniform sampler2D tArea;
uniform sampler2D tSearch;
uniform vec2 resolution;
varying vec2 vUv;
varying vec4 vOffset[3];
varying vec2 vPixcoord;
#if __VERSION__ == 100
vec2 round( vec2 x ) {
return sign( x ) * floor( abs( x ) + 0.5 );
}
#endif
float SMAASearchLength( sampler2D searchTex, vec2 e, float bias, float scale ) {
// Not required if searchTex accesses are set to point:
// float2 SEARCH_TEX_PIXEL_SIZE = 1.0 / float2(66.0, 33.0);
// e = float2(bias, 0.0) + 0.5 * SEARCH_TEX_PIXEL_SIZE +
// e * float2(scale, 1.0) * float2(64.0, 32.0) * SEARCH_TEX_PIXEL_SIZE;
e.r = bias + e.r * scale;
return 255.0 * texture2D( searchTex, e, 0.0 ).r;
}
float SMAASearchXLeft( sampler2D edgesTex, sampler2D searchTex, vec2 texcoord, float end ) {
/**
* @PSEUDO_GATHER4
* This texcoord has been offset by (-0.25, -0.125) in the vertex shader to
* sample between edge, thus fetching four edges in a row.
* Sampling with different offsets in each direction allows to disambiguate
* which edges are active from the four fetched ones.
*/
vec2 e = vec2( 0.0, 1.0 );
for ( int i = 0; i < SMAA_MAX_SEARCH_STEPS; i ++ ) { // WebGL port note: Changed while to for
e = texture2D( edgesTex, texcoord, 0.0 ).rg;
texcoord -= vec2( 2.0, 0.0 ) * resolution;
if ( ! ( texcoord.x > end && e.g > 0.8281 && e.r == 0.0 ) ) break;
}
// We correct the previous (-0.25, -0.125) offset we applied:
texcoord.x += 0.25 * resolution.x;
// The searches are bias by 1, so adjust the coords accordingly:
texcoord.x += resolution.x;
// Disambiguate the length added by the last step:
texcoord.x += 2.0 * resolution.x; // Undo last step
texcoord.x -= resolution.x * SMAASearchLength(searchTex, e, 0.0, 0.5);
return texcoord.x;
}
float SMAASearchXRight( sampler2D edgesTex, sampler2D searchTex, vec2 texcoord, float end ) {
vec2 e = vec2( 0.0, 1.0 );
for ( int i = 0; i < SMAA_MAX_SEARCH_STEPS; i ++ ) { // WebGL port note: Changed while to for
e = texture2D( edgesTex, texcoord, 0.0 ).rg;
texcoord += vec2( 2.0, 0.0 ) * resolution;
if ( ! ( texcoord.x < end && e.g > 0.8281 && e.r == 0.0 ) ) break;
}
texcoord.x -= 0.25 * resolution.x;
texcoord.x -= resolution.x;
texcoord.x -= 2.0 * resolution.x;
texcoord.x += resolution.x * SMAASearchLength( searchTex, e, 0.5, 0.5 );
return texcoord.x;
}
float SMAASearchYUp( sampler2D edgesTex, sampler2D searchTex, vec2 texcoord, float end ) {
vec2 e = vec2( 1.0, 0.0 );
for ( int i = 0; i < SMAA_MAX_SEARCH_STEPS; i ++ ) { // WebGL port note: Changed while to for
e = texture2D( edgesTex, texcoord, 0.0 ).rg;
texcoord += vec2( 0.0, 2.0 ) * resolution; // WebGL port note: Changed sign
if ( ! ( texcoord.y > end && e.r > 0.8281 && e.g == 0.0 ) ) break;
}
texcoord.y -= 0.25 * resolution.y; // WebGL port note: Changed sign
texcoord.y -= resolution.y; // WebGL port note: Changed sign
texcoord.y -= 2.0 * resolution.y; // WebGL port note: Changed sign
texcoord.y += resolution.y * SMAASearchLength( searchTex, e.gr, 0.0, 0.5 ); // WebGL port note: Changed sign
return texcoord.y;
}
float SMAASearchYDown( sampler2D edgesTex, sampler2D searchTex, vec2 texcoord, float end ) {
vec2 e = vec2( 1.0, 0.0 );
for ( int i = 0; i < SMAA_MAX_SEARCH_STEPS; i ++ ) { // WebGL port note: Changed while to for
e = texture2D( edgesTex, texcoord, 0.0 ).rg;
texcoord -= vec2( 0.0, 2.0 ) * resolution; // WebGL port note: Changed sign
if ( ! ( texcoord.y < end && e.r > 0.8281 && e.g == 0.0 ) ) break;
}
texcoord.y += 0.25 * resolution.y; // WebGL port note: Changed sign
texcoord.y += resolution.y; // WebGL port note: Changed sign
texcoord.y += 2.0 * resolution.y; // WebGL port note: Changed sign
texcoord.y -= resolution.y * SMAASearchLength( searchTex, e.gr, 0.5, 0.5 ); // WebGL port note: Changed sign
return texcoord.y;
}
vec2 SMAAArea( sampler2D areaTex, vec2 dist, float e1, float e2, float offset ) {
// Rounding prevents precision errors of bilinear filtering:
vec2 texcoord = float( SMAA_AREATEX_MAX_DISTANCE ) * round( 4.0 * vec2( e1, e2 ) ) + dist;
// We do a scale and bias for mapping to texel space:
texcoord = SMAA_AREATEX_PIXEL_SIZE * texcoord + ( 0.5 * SMAA_AREATEX_PIXEL_SIZE );
// Move to proper place, according to the subpixel offset:
texcoord.y += SMAA_AREATEX_SUBTEX_SIZE * offset;
return texture2D( areaTex, texcoord, 0.0 ).rg;
}
vec4 SMAABlendingWeightCalculationPS( vec2 texcoord, vec2 pixcoord, vec4 offset[ 3 ], sampler2D edgesTex, sampler2D areaTex, sampler2D searchTex, ivec4 subsampleIndices ) {
vec4 weights = vec4( 0.0, 0.0, 0.0, 0.0 );
vec2 e = texture2D( edgesTex, texcoord ).rg;
if ( e.g > 0.0 ) { // Edge at north
vec2 d;
// Find the distance to the left:
vec2 coords;
coords.x = SMAASearchXLeft( edgesTex, searchTex, offset[ 0 ].xy, offset[ 2 ].x );
coords.y = offset[ 1 ].y; // offset[1].y = texcoord.y - 0.25 * resolution.y (@CROSSING_OFFSET)
d.x = coords.x;
// Now fetch the left crossing edges, two at a time using bilinear
// filtering. Sampling at -0.25 (see @CROSSING_OFFSET) enables to
// discern what value each edge has:
float e1 = texture2D( edgesTex, coords, 0.0 ).r;
// Find the distance to the right:
coords.x = SMAASearchXRight( edgesTex, searchTex, offset[ 0 ].zw, offset[ 2 ].y );
d.y = coords.x;
// We want the distances to be in pixel units (doing this here allow to
// better interleave arithmetic and memory accesses):
d = d / resolution.x - pixcoord.x;
// SMAAArea below needs a sqrt, as the areas texture is compressed
// quadratically:
vec2 sqrt_d = sqrt( abs( d ) );
// Fetch the right crossing edges:
coords.y -= 1.0 * resolution.y; // WebGL port note: Added
float e2 = SMAASampleLevelZeroOffset( edgesTex, coords, ivec2( 1, 0 ) ).r;
// Ok, we know how this pattern looks like, now it is time for getting
// the actual area:
weights.rg = SMAAArea( areaTex, sqrt_d, e1, e2, float( subsampleIndices.y ) );
}
if ( e.r > 0.0 ) { // Edge at west
vec2 d;
// Find the distance to the top:
vec2 coords;
coords.y = SMAASearchYUp( edgesTex, searchTex, offset[ 1 ].xy, offset[ 2 ].z );
coords.x = offset[ 0 ].x; // offset[1].x = texcoord.x - 0.25 * resolution.x;
d.x = coords.y;
// Fetch the top crossing edges:
float e1 = texture2D( edgesTex, coords, 0.0 ).g;
// Find the distance to the bottom:
coords.y = SMAASearchYDown( edgesTex, searchTex, offset[ 1 ].zw, offset[ 2 ].w );
d.y = coords.y;
// We want the distances to be in pixel units:
d = d / resolution.y - pixcoord.y;
// SMAAArea below needs a sqrt, as the areas texture is compressed
// quadratically:
vec2 sqrt_d = sqrt( abs( d ) );
// Fetch the bottom crossing edges:
coords.y -= 1.0 * resolution.y; // WebGL port note: Added
float e2 = SMAASampleLevelZeroOffset( edgesTex, coords, ivec2( 0, 1 ) ).g;
// Get the area for this direction:
weights.ba = SMAAArea( areaTex, sqrt_d, e1, e2, float( subsampleIndices.x ) );
}
return weights;
}
void main() {
gl_FragColor = SMAABlendingWeightCalculationPS( vUv, vPixcoord, vOffset, tDiffuse, tArea, tSearch, ivec4( 0.0 ) );
}`
};
// SMAA pass 3 of 3: neighborhood blending. Mixes each pixel of the original
// color buffer (tColor) with a neighbour according to the blending weights
// computed by SMAAWeightsShader (tDiffuse).
const SMAABlendShader = {
name: 'SMAABlendShader',
uniforms: {
'tDiffuse': { value: null }, // blending weights texture from SMAAWeightsShader
'tColor': { value: null }, // original (unfiltered) color buffer
'resolution': { value: new Vector2( 1 / 1024, 1 / 512 ) } // 1 / render size in pixels
},
vertexShader: /* glsl */`
uniform vec2 resolution;
varying vec2 vUv;
varying vec4 vOffset[ 2 ];
void SMAANeighborhoodBlendingVS( vec2 texcoord ) {
vOffset[ 0 ] = texcoord.xyxy + resolution.xyxy * vec4( -1.0, 0.0, 0.0, 1.0 ); // WebGL port note: Changed sign in W component
vOffset[ 1 ] = texcoord.xyxy + resolution.xyxy * vec4( 1.0, 0.0, 0.0, -1.0 ); // WebGL port note: Changed sign in W component
}
void main() {
vUv = uv;
SMAANeighborhoodBlendingVS( vUv );
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,
fragmentShader: /* glsl */`
uniform sampler2D tDiffuse;
uniform sampler2D tColor;
uniform vec2 resolution;
varying vec2 vUv;
varying vec4 vOffset[ 2 ];
vec4 SMAANeighborhoodBlendingPS( vec2 texcoord, vec4 offset[ 2 ], sampler2D colorTex, sampler2D blendTex ) {
// Fetch the blending weights for current pixel:
vec4 a;
a.xz = texture2D( blendTex, texcoord ).xz;
a.y = texture2D( blendTex, offset[ 1 ].zw ).g;
a.w = texture2D( blendTex, offset[ 1 ].xy ).a;
// Is there any blending weight with a value greater than 0.0?
if ( dot(a, vec4( 1.0, 1.0, 1.0, 1.0 )) < 1e-5 ) {
return texture2D( colorTex, texcoord, 0.0 );
} else {
// Up to 4 lines can be crossing a pixel (one through each edge). We
// favor blending by choosing the line with the maximum weight for each
// direction:
vec2 offset;
offset.x = a.a > a.b ? a.a : -a.b; // left vs. right
offset.y = a.g > a.r ? -a.g : a.r; // top vs. bottom // WebGL port note: Changed signs
// Then we go in the direction that has the maximum weight:
if ( abs( offset.x ) > abs( offset.y )) { // horizontal vs. vertical
offset.y = 0.0;
} else {
offset.x = 0.0;
}
// Fetch the opposite color and lerp by hand:
vec4 C = texture2D( colorTex, texcoord, 0.0 );
texcoord += sign( offset ) * resolution;
vec4 Cop = texture2D( colorTex, texcoord, 0.0 );
float s = abs( offset.x ) > abs( offset.y ) ? abs( offset.x ) : abs( offset.y );
// WebGL port note: Added gamma correction
C.xyz = pow(C.xyz, vec3(2.2));
Cop.xyz = pow(Cop.xyz, vec3(2.2));
vec4 mixed = mix(C, Cop, s);
mixed.xyz = pow(mixed.xyz, vec3(1.0 / 2.2));
return mixed;
}
}
void main() {
gl_FragColor = SMAANeighborhoodBlendingPS( vUv, vOffset, tColor, tDiffuse );
}`
};
export { SMAAEdgesShader, SMAAWeightsShader, SMAABlendShader };

View File

@ -0,0 +1,228 @@
import {
AdditiveBlending,
Color,
HalfFloatType,
ShaderMaterial,
UniformsUtils,
WebGLRenderTarget
} from '/static/javascript/three/build/three.module.js';
import { Pass, FullScreenQuad } from'/static/javascript/three/examples/jsm/postprocessing/Pass.js';
import { CopyShader } from'/static/javascript/three/examples/jsm/shaders/CopyShader.js';
/**
 *
 * Supersample Anti-Aliasing Render Pass
 *
 * This manual approach to SSAA re-renders the scene once for each sample with camera jitter and accumulates the results.
 *
 * References: https://en.wikipedia.org/wiki/Supersampling
 *
 */
class SSAARenderPass extends Pass {
// scene/camera: what to render. clearColor/clearAlpha: used when clearing
// the internal sample buffer before each jittered render.
constructor( scene, camera, clearColor, clearAlpha ) {
super();
this.scene = scene;
this.camera = camera;
this.sampleLevel = 4; // specified as n, where the number of samples is 2^n, so sampleLevel = 4, is 2^4 samples, 16.
this.unbiased = true;
// as we need to clear the buffer in this pass, clearColor must be set to something, defaults to black.
this.clearColor = ( clearColor !== undefined ) ? clearColor : 0x000000;
this.clearAlpha = ( clearAlpha !== undefined ) ? clearAlpha : 0;
this._oldClearColor = new Color();
const copyShader = CopyShader;
this.copyUniforms = UniformsUtils.clone( copyShader.uniforms );
// Copy material with additive blending: each jittered sample is added
// into the output buffer, weighted via the 'opacity' uniform.
this.copyMaterial = new ShaderMaterial( {
uniforms: this.copyUniforms,
vertexShader: copyShader.vertexShader,
fragmentShader: copyShader.fragmentShader,
transparent: true,
depthTest: false,
depthWrite: false,
premultipliedAlpha: true,
blending: AdditiveBlending
} );
this.fsQuad = new FullScreenQuad( this.copyMaterial );
}
// Releases the lazily-created sample buffer, the copy material and the quad.
dispose() {
if ( this.sampleRenderTarget ) {
this.sampleRenderTarget.dispose();
this.sampleRenderTarget = null;
}
this.copyMaterial.dispose();
this.fsQuad.dispose();
}
setSize( width, height ) {
if ( this.sampleRenderTarget ) this.sampleRenderTarget.setSize( width, height );
}
render( renderer, writeBuffer, readBuffer ) {
// The accumulation buffer is allocated lazily on first render.
if ( ! this.sampleRenderTarget ) {
this.sampleRenderTarget = new WebGLRenderTarget( readBuffer.width, readBuffer.height, { type: HalfFloatType } );
this.sampleRenderTarget.texture.name = 'SSAARenderPass.sample';
}
// sampleLevel is clamped to [0, 5]; _JitterVectors[k] holds 2^k offsets.
const jitterOffsets = _JitterVectors[ Math.max( 0, Math.min( this.sampleLevel, 5 ) ) ];
const autoClear = renderer.autoClear;
renderer.autoClear = false;
renderer.getClearColor( this._oldClearColor );
const oldClearAlpha = renderer.getClearAlpha();
const baseSampleWeight = 1.0 / jitterOffsets.length;
const roundingRange = 1 / 32; // spread of per-sample weights used in unbiased mode
this.copyUniforms[ 'tDiffuse' ].value = this.sampleRenderTarget.texture;
const viewOffset = {
fullWidth: readBuffer.width,
fullHeight: readBuffer.height,
offsetX: 0,
offsetY: 0,
width: readBuffer.width,
height: readBuffer.height
};
// Preserve any view offset the caller already configured on the camera.
const originalViewOffset = Object.assign( {}, this.camera.view );
if ( originalViewOffset.enabled ) Object.assign( viewOffset, originalViewOffset );
// render the scene multiple times, each slightly jitter offset from the last and accumulate the results.
for ( let i = 0; i < jitterOffsets.length; i ++ ) {
const jitterOffset = jitterOffsets[ i ];
if ( this.camera.setViewOffset ) {
this.camera.setViewOffset(
viewOffset.fullWidth, viewOffset.fullHeight,
viewOffset.offsetX + jitterOffset[ 0 ] * 0.0625, viewOffset.offsetY + jitterOffset[ 1 ] * 0.0625, // 0.0625 = 1 / 16
viewOffset.width, viewOffset.height
);
}
let sampleWeight = baseSampleWeight;
if ( this.unbiased ) {
// the theory is that equal weights for each sample lead to an accumulation of rounding errors.
// The following equation varies the sampleWeight per sample so that it is uniformly distributed
// across a range of values whose rounding errors cancel each other out.
const uniformCenteredDistribution = ( - 0.5 + ( i + 0.5 ) / jitterOffsets.length );
sampleWeight += roundingRange * uniformCenteredDistribution;
}
this.copyUniforms[ 'opacity' ].value = sampleWeight;
renderer.setClearColor( this.clearColor, this.clearAlpha );
renderer.setRenderTarget( this.sampleRenderTarget );
renderer.clear();
renderer.render( this.scene, this.camera );
renderer.setRenderTarget( this.renderToScreen ? null : writeBuffer );
// The output buffer is cleared once, before the first sample is accumulated.
if ( i === 0 ) {
renderer.setClearColor( 0x000000, 0.0 );
renderer.clear();
}
this.fsQuad.render( renderer );
}
// Restore the camera's original view offset (or clear the jitter offset).
if ( this.camera.setViewOffset && originalViewOffset.enabled ) {
this.camera.setViewOffset(
originalViewOffset.fullWidth, originalViewOffset.fullHeight,
originalViewOffset.offsetX, originalViewOffset.offsetY,
originalViewOffset.width, originalViewOffset.height
);
} else if ( this.camera.clearViewOffset ) {
this.camera.clearViewOffset();
}
renderer.autoClear = autoClear;
renderer.setClearColor( this._oldClearColor, oldClearAlpha );
}
}
// Jitter offsets on an integer [ -8, 8 ) grid; they must be mapped onto
// [ -0.5, 0.5 ) before use, i.e. scaled by 1/16. Entry k of the outer array
// holds 2^k sample positions (k = sampleLevel).
//
// Sample patterns reference: https://msdn.microsoft.com/en-us/library/windows/desktop/ff476218%28v=vs.85%29.aspx?f=255&MSPPError=-2147217396
const _JitterVectors = [
	[ [ 0, 0 ] ],
	[ [ 4, 4 ], [ -4, -4 ] ],
	[ [ -2, -6 ], [ 6, -2 ], [ -6, 2 ], [ 2, 6 ] ],
	[
		[ 1, -3 ], [ -1, 3 ], [ 5, 1 ], [ -3, -5 ],
		[ -5, 5 ], [ -7, -1 ], [ 3, 7 ], [ 7, -7 ]
	],
	[
		[ 1, 1 ], [ -1, -3 ], [ -3, 2 ], [ 4, -1 ],
		[ -5, -2 ], [ 2, 5 ], [ 5, 3 ], [ 3, -5 ],
		[ -2, 6 ], [ 0, -7 ], [ -4, -6 ], [ -6, 4 ],
		[ -8, 0 ], [ 7, -4 ], [ 6, 7 ], [ -7, -8 ]
	],
	[
		[ -4, -7 ], [ -7, -5 ], [ -3, -5 ], [ -5, -4 ],
		[ -1, -4 ], [ -2, -2 ], [ -6, -1 ], [ -4, 0 ],
		[ -7, 1 ], [ -1, 2 ], [ -6, 3 ], [ -3, 3 ],
		[ -7, 6 ], [ -3, 6 ], [ -5, 7 ], [ -1, 7 ],
		[ 5, -7 ], [ 1, -6 ], [ 6, -5 ], [ 4, -4 ],
		[ 2, -3 ], [ 7, -2 ], [ 1, -1 ], [ 4, -1 ],
		[ 2, 1 ], [ 6, 2 ], [ 0, 4 ], [ 4, 4 ],
		[ 2, 5 ], [ 7, 5 ], [ 5, 6 ], [ 3, 7 ]
	]
];
export { SSAARenderPass };

Some files were not shown because too many files have changed in this diff Show More