// officialAccount/node_modules/three/examples/jsm/tsl/display/SMAANode.js

import { HalfFloatType, LinearFilter, NearestFilter, RenderTarget, Texture, Vector2, QuadMesh, NodeMaterial, TempNode, RendererUtils } from 'three/webgpu';
import { abs, nodeObject, Fn, NodeUpdateType, uv, uniform, convertToTexture, varyingProperty, vec2, vec4, modelViewProjection, passTexture, max, step, dot, float, texture, If, Loop, int, Break, sqrt, sign, mix } from 'three/tsl';
const _quadMesh = /*@__PURE__*/ new QuadMesh();
const _size = /*@__PURE__*/ new Vector2();
let _rendererState;
/**
* Post processing node for applying SMAA. Unlike FXAA, this node
* should be applied before converting colors to sRGB. SMAA should produce
* better results than FXAA but is also more expensive to execute.
*
* Used Preset: SMAA 1x Medium (with color edge detection)
* Reference: {@link https://github.com/iryoku/smaa/releases/tag/v2.8}.
*
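* A minimal usage sketch (assumes a `WebGPURenderer` based post processing setup;
* `renderer`, `scene` and `camera` are placeholders and `pass` comes from 'three/tsl'):
* ```js
* const postProcessing = new THREE.PostProcessing( renderer );
* postProcessing.outputNode = smaa( pass( scene, camera ) );
* ```
*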
* @augments TempNode
* @three_import import { smaa } from 'three/addons/tsl/display/SMAANode.js';
*/
class SMAANode extends TempNode {
static get type() {
return 'SMAANode';
}
/**
* Constructs a new SMAA node.
*
* @param {TextureNode} textureNode - The texture node that represents the input of the effect.
*/
constructor( textureNode ) {
super( 'vec4' );
/**
* The texture node that represents the input of the effect.
*
* @type {TextureNode}
*/
this.textureNode = textureNode;
/**
* The `updateBeforeType` is set to `NodeUpdateType.FRAME` since the node renders
* its effect once per frame in `updateBefore()`.
*
* @type {string}
* @default 'frame'
*/
this.updateBeforeType = NodeUpdateType.FRAME;
/**
* The render target used for the edges pass.
*
* @private
* @type {RenderTarget}
*/
this._renderTargetEdges = new RenderTarget( 1, 1, { depthBuffer: false, type: HalfFloatType } );
this._renderTargetEdges.texture.name = 'SMAANode.edges';
/**
* The render target used for the weights pass.
*
* @private
* @type {RenderTarget}
*/
this._renderTargetWeights = new RenderTarget( 1, 1, { depthBuffer: false, type: HalfFloatType } );
this._renderTargetWeights.texture.name = 'SMAANode.weights';
/**
* The render target used for the blend pass.
*
* @private
* @type {RenderTarget}
*/
this._renderTargetBlend = new RenderTarget( 1, 1, { depthBuffer: false, type: HalfFloatType } );
this._renderTargetBlend.texture.name = 'SMAANode.blend';
// textures
const scope = this;
const areaTextureImage = new Image();
areaTextureImage.src = this._getAreaTexture();
areaTextureImage.onload = function () {
// assigning data to HTMLImageElement.src is asynchronous (see #15162)
scope._areaTexture.needsUpdate = true;
};
/**
* Represents the "area" texture used by the SMAA implementation.
*
* @private
* @type {Texture}
*/
this._areaTexture = new Texture();
this._areaTexture.name = 'SMAANode.area';
this._areaTexture.image = areaTextureImage;
this._areaTexture.minFilter = LinearFilter;
this._areaTexture.generateMipmaps = false;
this._areaTexture.flipY = false;
const searchTextureImage = new Image();
searchTextureImage.src = this._getSearchTexture();
searchTextureImage.onload = function () {
// assigning data to HTMLImageElement.src is asynchronous (see #15162)
scope._searchTexture.needsUpdate = true;
};
/**
* Represents the "search" texture used by the SMAA implementation.
*
* @private
* @type {Texture}
*/
this._searchTexture = new Texture();
this._searchTexture.name = 'SMAANode.search';
this._searchTexture.image = searchTextureImage;
this._searchTexture.magFilter = NearestFilter;
this._searchTexture.minFilter = NearestFilter;
this._searchTexture.generateMipmaps = false;
this._searchTexture.flipY = false;
/**
* A uniform node holding the inverse resolution value.
*
* @private
* @type {UniformNode<vec2>}
*/
this._invSize = uniform( new Vector2() );
/**
* A uniform texture node holding the area texture.
*
* @private
* @type {TextureNode}
*/
this._areaTextureUniform = texture( this._areaTexture );
/**
* A uniform texture node holding the search texture.
*
* @private
* @type {TextureNode}
*/
this._searchTextureUniform = texture( this._searchTexture );
/**
* A uniform texture node representing the edges pass.
*
* @private
* @type {TextureNode}
*/
this._edgesTextureUniform = texture( this._renderTargetEdges.texture );
/**
* A uniform texture node representing the weights pass.
*
* @private
* @type {TextureNode}
*/
this._weightsTextureUniform = texture( this._renderTargetWeights.texture );
/**
* The node material that holds the TSL for rendering the edges pass.
*
* @private
* @type {NodeMaterial}
*/
this._materialEdges = new NodeMaterial();
this._materialEdges.name = 'SMAANode.edges';
/**
* The node material that holds the TSL for rendering the weights pass.
*
* @private
* @type {NodeMaterial}
*/
this._materialWeights = new NodeMaterial();
this._materialWeights.name = 'SMAANode.weights';
/**
* The node material that holds the TSL for rendering the blend pass.
*
* @private
* @type {NodeMaterial}
*/
this._materialBlend = new NodeMaterial();
this._materialBlend.name = 'SMAANode.blend';
/**
* The result of the effect is represented as a separate texture node.
*
* @private
* @type {PassTextureNode}
*/
this._textureNode = passTexture( this, this._renderTargetBlend.texture );
}
/**
* Returns the result of the effect as a texture node.
*
* @return {PassTextureNode} A texture node that represents the result of the effect.
*/
getTextureNode() {
return this._textureNode;
}
/**
* Sets the size of the effect.
*
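* This method is called automatically from `updateBefore()` with the
* renderer's drawing buffer size, so manual calls are usually not required.
*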
* @param {number} width - The width of the effect.
* @param {number} height - The height of the effect.
*/
setSize( width, height ) {
this._invSize.value.set( 1 / width, 1 / height );
this._renderTargetEdges.setSize( width, height );
this._renderTargetWeights.setSize( width, height );
this._renderTargetBlend.setSize( width, height );
}
/**
* This method is used to render the effect once per frame.
*
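* The effect is computed in three passes, each rendered into its own internal
* render target: edge detection, blend weight calculation and neighborhood blending.
*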
* @param {NodeFrame} frame - The current node frame.
*/
updateBefore( frame ) {
const { renderer } = frame;
_rendererState = RendererUtils.resetRendererState( renderer, _rendererState );
//
const size = renderer.getDrawingBufferSize( _size );
this.setSize( size.width, size.height );
// edges
renderer.setRenderTarget( this._renderTargetEdges );
_quadMesh.material = this._materialEdges;
_quadMesh.render( renderer );
// weights
renderer.setRenderTarget( this._renderTargetWeights );
_quadMesh.material = this._materialWeights;
_quadMesh.render( renderer );
// blend
renderer.setRenderTarget( this._renderTargetBlend );
_quadMesh.material = this._materialBlend;
_quadMesh.render( renderer );
// restore
RendererUtils.restoreRendererState( renderer, _rendererState );
}
/**
* This method is used to setup the effect's TSL code.
*
* @param {NodeBuilder} builder - The current node builder.
* @return {PassTextureNode}
*/
setup( builder ) {
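// Constants of the "SMAA 1x Medium" preset (see class documentation). Per the
// SMAA reference implementation, SMAA_THRESHOLD controls edge detection
// sensitivity, SMAA_MAX_SEARCH_STEPS bounds the horizontal/vertical pattern
// searches, and the AREATEX values describe the layout of the precomputed
// 160x560 area texture.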
const SMAA_THRESHOLD = 0.1;
const SMAA_MAX_SEARCH_STEPS = 8;
const SMAA_AREATEX_MAX_DISTANCE = 16;
const SMAA_AREATEX_PIXEL_SIZE = vec2( 1 / 160, 1 / 560 );
const SMAA_AREATEX_SUBTEX_SIZE = ( 1 / 7 );
const textureNode = this.textureNode;
const uvNode = textureNode.uvNode || uv();
// edges
const SMAAEdgeDetectionVS = Fn( () => {
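// Precompute the sampling offsets for the fragment stage: vOffset0 addresses the
// left/top neighbors, vOffset1 the right/bottom neighbors and vOffset2 the
// left-left/top-top neighbors used for local contrast adaptation.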
const vOffset0 = vec4( uvNode.xy, uvNode.xy ).add( vec4( this._invSize.xy, this._invSize.xy ).mul( vec4( - 1.0, 0.0, 0.0, - 1.0 ) ) );
const vOffset1 = vec4( uvNode.xy, uvNode.xy ).add( vec4( this._invSize.xy, this._invSize.xy ).mul( vec4( 1.0, 0.0, 0.0, 1.0 ) ) );
const vOffset2 = vec4( uvNode.xy, uvNode.xy ).add( vec4( this._invSize.xy, this._invSize.xy ).mul( vec4( - 2.0, 0.0, 0.0, - 2.0 ) ) );
varyingProperty( 'vec4', 'vOffset0' ).assign( vOffset0 );
varyingProperty( 'vec4', 'vOffset1' ).assign( vOffset1 );
varyingProperty( 'vec4', 'vOffset2' ).assign( vOffset2 );
return modelViewProjection;
} );
const SMAAEdgeDetectionFS = Fn( () => {
const vOffset0 = varyingProperty( 'vec4', 'vOffset0' );
const vOffset1 = varyingProperty( 'vec4', 'vOffset1' );
const vOffset2 = varyingProperty( 'vec4', 'vOffset2' );
const threshold = vec2( SMAA_THRESHOLD, SMAA_THRESHOLD );
// Calculate color deltas:
const delta = vec4().toVar();
const C = this.textureNode.sample( uvNode ).rgb.toVar();
// Calculate left and top deltas:
const Cleft = this.textureNode.sample( vOffset0.xy ).rgb.toVar();
let t = abs( C.sub( Cleft ) );
delta.x = max( max( t.r, t.g ), t.b );
const Ctop = this.textureNode.sample( vOffset0.zw ).rgb.toVar();
t = abs( C.sub( Ctop ) );
delta.y = max( max( t.r, t.g ), t.b );
// We do the usual threshold:
const edges = step( threshold, delta.xy ).toVar();
// Then discard if there is no edge:
dot( edges, vec2( 1.0, 1.0 ) ).equal( 0 ).discard();
// Calculate right and bottom deltas:
const Cright = this.textureNode.sample( vOffset1.xy ).rgb.toVar();
t = abs( C.sub( Cright ) );
delta.z = max( max( t.r, t.g ), t.b );
const Cbottom = this.textureNode.sample( vOffset1.zw ).rgb.toVar();
t = abs( C.sub( Cbottom ) );
delta.w = max( max( t.r, t.g ), t.b );
// Calculate the maximum delta in the direct neighborhood:
let maxDelta = max( max( max( delta.x, delta.y ), delta.z ), delta.w ).toVar();
// Calculate left-left and top-top deltas:
const Cleftleft = this.textureNode.sample( vOffset2.xy ).rgb.toVar();
t = abs( C.sub( Cleftleft ) );
delta.z = max( max( t.r, t.g ), t.b );
const Ctoptop = this.textureNode.sample( vOffset2.zw ).rgb.toVar();
t = abs( C.sub( Ctoptop ) );
delta.w = max( max( t.r, t.g ), t.b );
// Calculate the final maximum delta:
maxDelta = max( max( maxDelta, delta.z ), delta.w );
// Local contrast adaptation in action:
edges.xy.mulAssign( vec2( step( float( 0.5 ).mul( maxDelta ), delta.xy ) ) );
return vec4( edges, 0, 0 );
} );
// weights
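// Determines how much length to add in the last step of the searches, based on
// the bilinearly fetched edge pattern (see the @PSEUDO_GATHER4 note below).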
const SMAASearchLength = Fn( ( [ searchTex, e, bias, scale ] ) => {
// Not required if searchTex accesses are set to point:
// float2 SEARCH_TEX_PIXEL_SIZE = 1.0 / float2(66.0, 33.0);
// e = float2(bias, 0.0) + 0.5 * SEARCH_TEX_PIXEL_SIZE + e * float2(scale, 1.0) * float2(64.0, 32.0) * SEARCH_TEX_PIXEL_SIZE;
const coord = vec2( e ).toVar();
coord.r = bias.add( coord.r.mul( scale ) );
return float( 255 ).mul( searchTex.sample( coord ) ).r;
} );
const SMAAArea = Fn( ( [ areaTex, dist, e1, e2, offset ] ) => {
// Rounding prevents precision errors of bilinear filtering:
let texcoord = float( SMAA_AREATEX_MAX_DISTANCE ).mul( float( 4 ).mul( vec2( e1, e2 ) ).round() ).add( dist );
// We do a scale and bias for mapping to texel space:
texcoord = SMAA_AREATEX_PIXEL_SIZE.mul( texcoord ).add( float( 0.5 ).mul( SMAA_AREATEX_PIXEL_SIZE ) );
// Move to proper place, according to the subpixel offset:
texcoord.y.addAssign( float( SMAA_AREATEX_SUBTEX_SIZE ).mul( offset ) );
return areaTex.sample( texcoord ).rg;
} );
const SMAASearchXLeft = Fn( ( [ edgesTex, searchTex, texcoord, end ] ) => {
/**
* @PSEUDO_GATHER4
* This texcoord has been offset by (-0.25, -0.125) in the vertex shader to
* sample between edgels, thus fetching four edgels in a row.
* Sampling with different offsets in each direction makes it possible to
* disambiguate which edges are active among the four fetched ones.
*/
const e = vec2( 0.0, 1.0 ).toVar();
const coord = vec2( texcoord ).toVar();
Loop( { start: int( 0 ), end: int( SMAA_MAX_SEARCH_STEPS ), type: 'int', condition: '<' }, () => { // port note: Changed while to for
e.assign( edgesTex.sample( coord ).rg );
coord.subAssign( vec2( 2, 0 ).mul( this._invSize ) );
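// Stop when the search leaves the allowed range, when the top edge is no
// longer activated (e.g <= 0.8281) or when a crossing edge is found (e.r != 0).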
If( coord.x.lessThanEqual( end ).or( e.g.lessThanEqual( float( 0.8281 ) ).or( e.r.notEqual( float( 0 ) ) ) ), () => {
Break();
} );
} );
// We correct the previous (-0.25, -0.125) offset we applied:
coord.x.addAssign( float( 0.25 ).mul( this._invSize.x ) );
// The searches are biased by 1, so adjust the coords accordingly:
coord.x.addAssign( this._invSize.x );
// Disambiguate the length added by the last step:
coord.x.addAssign( float( 2 ).mul( this._invSize.x ) );
coord.x.subAssign( this._invSize.x.mul( SMAASearchLength( searchTex, e, 0, 0.5 ) ) );
return coord.x;
} );
const SMAASearchXRight = Fn( ( [ edgesTex, searchTex, texcoord, end ] ) => {
const e = vec2( 0.0, 1.0 ).toVar();
const coord = vec2( texcoord ).toVar();
Loop( { start: int( 0 ), end: int( SMAA_MAX_SEARCH_STEPS ), type: 'int', condition: '<' }, () => { // port note: Changed while to for
e.assign( edgesTex.sample( coord ).rg );
coord.addAssign( vec2( 2, 0 ).mul( this._invSize ) );
If( coord.x.greaterThanEqual( end ).or( e.g.lessThanEqual( float( 0.8281 ) ).or( e.r.notEqual( float( 0 ) ) ) ), () => {
Break();
} );
} );
coord.x.subAssign( float( 0.25 ).mul( this._invSize.x ) );
coord.x.subAssign( this._invSize.x );
coord.x.subAssign( float( 2 ).mul( this._invSize.x ) );
coord.x.addAssign( this._invSize.x.mul( SMAASearchLength( searchTex, e, 0.5, 0.5 ) ) );
return coord.x;
} );
const SMAASearchYUp = Fn( ( [ edgesTex, searchTex, texcoord, end ] ) => {
const e = vec2( 1.0, 0.0 ).toVar();
const coord = vec2( texcoord ).toVar();
Loop( { start: int( 0 ), end: int( SMAA_MAX_SEARCH_STEPS ), type: 'int', condition: '<' }, () => { // port note: Changed while to for
e.assign( edgesTex.sample( coord ).rg );
coord.addAssign( vec2( 0, - 2 ).mul( this._invSize ) );
If( coord.y.lessThanEqual( end ).or( e.r.lessThanEqual( float( 0.8281 ) ).or( e.g.notEqual( float( 0 ) ) ) ), () => {
Break();
} );
} );
coord.y.addAssign( float( 0.25 ).mul( this._invSize.y ) );
coord.y.addAssign( this._invSize.y );
coord.y.addAssign( float( 2 ).mul( this._invSize.y ) );
coord.y.subAssign( this._invSize.y.mul( SMAASearchLength( searchTex, e.gr, 0, 0.5 ) ) );
return coord.y;
} );
const SMAASearchYDown = Fn( ( [ edgesTex, searchTex, texcoord, end ] ) => {
const e = vec2( 1.0, 0.0 ).toVar();
const coord = vec2( texcoord ).toVar();
Loop( { start: int( 0 ), end: int( SMAA_MAX_SEARCH_STEPS ), type: 'int', condition: '<' }, () => { // port note: Changed while to for
e.assign( edgesTex.sample( coord ).rg );
coord.subAssign( vec2( 0, - 2 ).mul( this._invSize ) );
If( coord.y.greaterThanEqual( end ).or( e.r.lessThanEqual( float( 0.8281 ) ).or( e.g.notEqual( float( 0 ) ) ) ), () => {
Break();
} );
} );
coord.y.subAssign( float( 0.25 ).mul( this._invSize.y ) );
coord.y.subAssign( this._invSize.y );
coord.y.subAssign( float( 2 ).mul( this._invSize.y ) );
coord.y.addAssign( this._invSize.y.mul( SMAASearchLength( searchTex, e.gr, 0.5, 0.5 ) ) );
return coord.y;
} );
const SMAAWeightsVS = Fn( () => {
const vPixcoord = uvNode.xy.div( this._invSize );
// We will use these offsets for the searches later on (see @PSEUDO_GATHER4):
const vOffset0 = vec4( uvNode.xy, uvNode.xy ).add( vec4( this._invSize.xy, this._invSize.xy ).mul( vec4( - 0.25, - 0.125, 1.25, - 0.125 ) ) ).toVar();
const vOffset1 = vec4( uvNode.xy, uvNode.xy ).add( vec4( this._invSize.xy, this._invSize.xy ).mul( vec4( - 0.125, - 0.25, - 0.125, 1.25 ) ) ).toVar();
// And these are for the searches; they indicate the ends of the loops:
const vOffset2 = vec4( vOffset0.xz, vOffset1.yw ).add( vec4( - 2.0, 2.0, - 2.0, 2.0 ).mul( vec4( this._invSize.xx, this._invSize.yy ) ).mul( float( SMAA_MAX_SEARCH_STEPS ) ) ).toVar();
varyingProperty( 'vec2', 'vPixcoord' ).assign( vPixcoord );
varyingProperty( 'vec4', 'vOffset0' ).assign( vOffset0 );
varyingProperty( 'vec4', 'vOffset1' ).assign( vOffset1 );
varyingProperty( 'vec4', 'vOffset2' ).assign( vOffset2 );
return modelViewProjection;
} );
const SMAAWeightsFS = Fn( () => {
const vPixcoord = varyingProperty( 'vec2', 'vPixcoord' );
const vOffset0 = varyingProperty( 'vec4', 'vOffset0' );
const vOffset1 = varyingProperty( 'vec4', 'vOffset1' );
const vOffset2 = varyingProperty( 'vec4', 'vOffset2' );
const weights = vec4( 0.0, 0.0, 0.0, 0.0 ).toVar();
const subsampleIndices = vec4( 0.0, 0.0, 0.0, 0.0 ).toVar();
const e = this._edgesTextureUniform.sample( uvNode ).rg.toVar();
If( e.g.greaterThan( float( 0 ) ), () => { // Edge at north
let d = vec2().toVar();
// Find the distance to the left:
const coordsLeft = vec2().toVar();
coordsLeft.x = SMAASearchXLeft( this._edgesTextureUniform, this._searchTextureUniform, vOffset0.xy, vOffset2.x );
coordsLeft.y = vOffset1.y; // offset[1].y = texcoord.y - 0.25 * resolution.y (@CROSSING_OFFSET)
d.x = coordsLeft.x;
// Now fetch the left crossing edges, two at a time using bilinear
// filtering. Sampling at -0.25 (see @CROSSING_OFFSET) makes it possible
// to discern what value each edge has:
const e1 = this._edgesTextureUniform.sample( coordsLeft ).r.toVar();
// Find the distance to the right:
const coordsRight = vec2().toVar();
coordsRight.x = SMAASearchXRight( this._edgesTextureUniform, this._searchTextureUniform, vOffset0.zw, vOffset2.y );
coordsRight.y = vOffset1.y;
d.y = coordsRight.x;
// We want the distances to be in pixel units (doing this here allows us to
// better interleave arithmetic and memory accesses):
d = d.div( this._invSize.x ).sub( vPixcoord.x );
// SMAAArea below needs a sqrt, as the areas texture is compressed quadratically:
const sqrt_d = sqrt( abs( d ) );
// Fetch the right crossing edges:
const e2 = this._edgesTextureUniform.sample( coordsRight.add( vec2( 1, 0 ).mul( this._invSize ) ) ).r.toVar();
weights.r = e2;
// Get the area for this direction:
weights.rg = SMAAArea( this._areaTextureUniform, sqrt_d, e1, e2, float( subsampleIndices.y ) );
} );
If( e.r.greaterThan( float( 0 ) ), () => { // Edge at west
let d = vec2().toVar();
// Find the distance to the top:
const coordsUp = vec2().toVar();
coordsUp.y = SMAASearchYUp( this._edgesTextureUniform, this._searchTextureUniform, vOffset1.xy, vOffset2.z );
coordsUp.x = vOffset0.x; // offset[1].x = texcoord.x - 0.25 * resolution.x;
d.x = coordsUp.y;
// Fetch the top crossing edges:
const e1 = this._edgesTextureUniform.sample( coordsUp ).g.toVar();
// Find the distance to the bottom:
const coordsDown = vec2().toVar();
coordsDown.y = SMAASearchYDown( this._edgesTextureUniform, this._searchTextureUniform, vOffset1.zw, vOffset2.w );
coordsDown.x = vOffset0.x;
d.y = coordsDown.y;
// We want the distances to be in pixel units:
d = d.div( this._invSize.y ).sub( vPixcoord.y );
// SMAAArea below needs a sqrt, as the areas texture is compressed quadratically:
const sqrt_d = sqrt( abs( d ) );
// Fetch the bottom crossing edges:
const e2 = this._edgesTextureUniform.sample( coordsDown.add( vec2( 0, 1 ).mul( this._invSize ) ) ).g.toVar();
// Get the area for this direction:
weights.ba = SMAAArea( this._areaTextureUniform, sqrt_d, e1, e2, float( subsampleIndices.x ) );
} );
return weights;
} );
// blend
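// Neighborhood blending: each pixel is shifted toward one of its neighbors
// according to the maximum blending weight computed in the previous pass.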
const SMAABlendVS = Fn( () => {
//const vOffset0 = vec4( uvNode.xy, uvNode.xy ).add( vec4( this._invSize.xy, this._invSize.xy ).mul( vec4( - 1.0, 0.0, 0.0, - 1.0 ) ) );
const vOffset1 = vec4( uvNode.xy, uvNode.xy ).add( vec4( this._invSize.xy, this._invSize.xy ).mul( vec4( 1.0, 0.0, 0.0, 1.0 ) ) );
//varyingProperty( 'vec4', 'vOffset0' ).assign( vOffset0 );
varyingProperty( 'vec4', 'vOffset1' ).assign( vOffset1 );
return modelViewProjection;
} );
const SMAABlendFS = Fn( () => {
//const vOffset0 = varyingProperty( 'vec4', 'vOffset0' );
const vOffset1 = varyingProperty( 'vec4', 'vOffset1' );
const result = vec4().toVar();
// Fetch the blending weights for current pixel:
const a = vec4().toVar();
a.xz = this._weightsTextureUniform.sample( uvNode ).xz;
a.y = this._weightsTextureUniform.sample( vOffset1.zw ).g;
a.w = this._weightsTextureUniform.sample( vOffset1.xy ).a;
// Is there any blending weight with a value greater than 0.0?
If( dot( a, vec4( 1.0 ) ).lessThan( 1e-5 ), () => { // no blending weights, keep the input color
result.assign( this.textureNode.sample( uvNode ) );
} ).Else( () => {
// Up to 4 lines can be crossing a pixel (one through each edge). We
// favor blending by choosing the line with the maximum weight for each
// direction:
const offset = vec2().toVar();
offset.x = a.a.greaterThan( a.b ).select( a.a, a.b.negate() ); // left vs. right
offset.y = a.g.greaterThan( a.r ).select( a.g, a.r.negate() ); // top vs. bottom
// Then we go in the direction that has the maximum weight:
If( abs( offset.x ).greaterThan( abs( offset.y ) ), () => { // horizontal vs. vertical
offset.y.assign( 0 );
} ).Else( () => {
offset.x.assign( 0 );
} );
// Fetch the opposite color and lerp by hand:
const C = this.textureNode.sample( uvNode ).toVar();
const texcoord = vec2( uvNode ).toVar();
texcoord.addAssign( sign( offset ).mul( this._invSize ) );
const Cop = this.textureNode.sample( texcoord ).toVar();
const s = abs( offset.x ).greaterThan( abs( offset.y ) ).select( abs( offset.x ), abs( offset.y ) ).toVar();
const mixed = mix( C, Cop, s );
result.assign( mixed );
} );
return result;
} );
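// Wire the TSL functions into the three pass materials. Assigning new
// vertex/fragment nodes requires flagging the materials for recompilation.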
this._materialEdges.vertexNode = SMAAEdgeDetectionVS().context( builder.getSharedContext() );
this._materialEdges.fragmentNode = SMAAEdgeDetectionFS().context( builder.getSharedContext() );
this._materialEdges.needsUpdate = true;
this._materialWeights.vertexNode = SMAAWeightsVS().context( builder.getSharedContext() );
this._materialWeights.fragmentNode = SMAAWeightsFS().context( builder.getSharedContext() );
this._materialWeights.needsUpdate = true;
this._materialBlend.vertexNode = SMAABlendVS().context( builder.getSharedContext() );
this._materialBlend.fragmentNode = SMAABlendFS().context( builder.getSharedContext() );
this._materialBlend.needsUpdate = true;
return this._textureNode;
}
/**
* Frees internal resources. This method should be called
* when the effect is no longer required.
*/
dispose() {
this._renderTargetEdges.dispose();
this._renderTargetWeights.dispose();
this._renderTargetBlend.dispose();
this._areaTexture.dispose();
this._searchTexture.dispose();
this._materialEdges.dispose();
this._materialWeights.dispose();
this._materialBlend.dispose();
}
/**
* Returns the area texture as a Base64 string.
*
* @private
* @return {string} The area texture.
*/
_getAreaTexture() {
return 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAKAAAAIwCAIAAACOVPcQAACBeklEQVR42u39W4xlWXrnh/3WWvuciIzMrKxrV8/0rWbY0+SQFKcb4owIkSIFCjY9AC1BT/LYBozRi+EX+cV+8IMsYAaCwRcBwjzMiw2jAWtgwC8WR5Q8mDFHZLNHTarZGrLJJllt1W2qKrsumZWZcTvn7L3W54e1vrXX3vuciLPPORFR1XE2EomorB0nVuz//r71re/y/1eMvb4Cb3N11xV/PP/2v4UBAwJG/7H8urx6/25/Gf8O5hypMQ0EEEQwAqLfoN/Z+97f/SW+/NvcgQk4sGBJK6H7N4PFVL+K+e0N11yNfkKvwUdwdlUAXPHHL38oa15f/i/46Ih6SuMSPmLAYAwyRKn7dfMGH97jaMFBYCJUgotIC2YAdu+LyW9vvubxAP8kAL8H/koAuOKP3+q6+xGnd5kdYCeECnGIJViwGJMAkQKfDvB3WZxjLKGh8VSCCzhwEWBpMc5/kBbjawT4HnwJfhr+pPBIu7uu+OOTo9vsmtQcniMBGkKFd4jDWMSCRUpLjJYNJkM+IRzQ+PQvIeAMTrBS2LEiaiR9b/5PuT6Ap/AcfAFO4Y3dA3DFH7/VS+M8k4baEAQfMI4QfbVDDGIRg7GKaIY52qAjTAgTvGBAPGIIghOCYAUrGFNgzA7Q3QhgCwfwAnwe5vDejgG44o/fbm1C5ZlYQvQDARPAIQGxCWBM+wWl37ZQESb4gImexGMDouhGLx1Cst0Saa4b4AqO4Hk4gxo+3DHAV/nx27p3JziPM2pVgoiia5MdEzCGULprIN7gEEeQ5IQxEBBBQnxhsDb5auGmAAYcHMA9eAAz8PBol8/xij9+C4Djlim4gJjWcwZBhCBgMIIYxGAVIkH3ZtcBuLdtRFMWsPGoY9rN+HoBji9VBYdwD2ZQg4cnO7OSq/z4rU5KKdwVbFAjNojCQzTlCLPFSxtamwh2jMUcEgg2Wm/6XgErIBhBckQtGN3CzbVacERgCnfgLswhnvqf7QyAq/z4rRZm1YglYE3affGITaZsdIe2FmMIpnOCap25I6jt2kCwCW0D1uAD9sZctNGXcQIHCkINDQgc78aCr+zjtw3BU/ijdpw3zhCwcaONwBvdeS2YZKkJNJsMPf2JKEvC28RXxxI0ASJyzQCjCEQrO4Q7sFArEzjZhaFc4cdv+/JFdKULM4px0DfUBI2hIsy06BqLhGTQEVdbfAIZXYMPesq6VoCHICzUyjwInO4Y411//LYLs6TDa9wvg2CC2rElgAnpTBziThxaL22MYhzfkghz6GAs2VHbbdM91VZu1MEEpupMMwKyVTb5ij9+u4VJG/5EgEMMmFF01cFai3isRbKbzb+YaU/MQbAm2XSMoUPAmvZzbuKYRIFApbtlrfFuUGd6vq2hXNnH78ZLh/iFhsQG3T4D1ib7k5CC6vY0DCbtrohgLEIClXiGtl10zc0CnEGIhhatLBva7NP58Tvw0qE8yWhARLQ8h4+AhQSP+I4F5xoU+VilGRJs6wnS7ruti/4KvAY/CfdgqjsMy4pf8fodQO8/gnuX3f/3xi3om1/h7THr+co3x93PP9+FBUfbNUjcjEmhcrkT+8K7ml7V10Jo05mpIEFy1NmCJWx9SIKKt+EjAL4Ez8EBVOB6havuT/rByPvHXK+9zUcfcbb254+9fydJknYnRr1oGfdaiAgpxu1Rx/Rek8KISftx3L+DfsLWAANn8Hvw0/AFeAGO9DFV3c6D+CcWbL8Dj9e7f+T1k8AZv/d7+PXWM/Z+VvdCrIvuAKO09RpEEQJM0Ci6+B4xhTWr4cZNOvhktabw0ta0rSJmqz3Yw5/AKXwenof7cAhTmBSPKf6JBdvH8IP17h95pXqw50/+BFnj88fev4NchyaK47OPhhtI8RFSvAfDSNh0Ck0p2gLxGkib5NJj/JWCr90EWQJvwBzO4AHcgztwAFN1evHPUVGwfXON+0derT1YeGON9Yy9/63X+OguiwmhIhQhD7l4sMqlG3D86Suc3qWZ4rWjI1X7u0Ytw6x3rIMeIOPDprfe2XzNgyj6PahhBjO4C3e6puDgXrdg+/5l948vF3bqwZetZ+z9Rx9zdIY5pInPK4Nk0t+l52xdK2B45Qd87nM8fsD5EfUhIcJcERw4RdqqH7Yde5V7m1vhNmtedkz6EDzUMF/2jJYWbC+4fzzA/Y+/8PPH3j9dcBAPIRP8JLXd5BpAu03aziOL3VVHZzz3CXWDPWd+SH2AnxIqQoTZpo9Ckc6HIrFbAbzNmlcg8Ag8NFDDAhbJvTBZXbC94P7t68EXfv6o+21gUtPETU7bbkLxvNKRFG2+KXzvtObonPP4rBvsgmaKj404DlshFole1Glfh02fE7bYR7dZ82oTewIBGn1Md6CG6YUF26X376oevOLzx95vhUmgblI6LBZwTCDY7vMq0op5WVXgsObOXJ+1x3qaBl9j1FeLxbhU9w1F+Wiba6s1X/TBz1LnUfuYDi4r2C69f1f14BWfP+p+W2GFKuC9phcELMYRRLur9DEZTUdEH+iEqWdaM7X4WOoPGI+ZYD2+wcQ+y+ioHUZ9dTDbArzxmi/bJI9BND0Ynd6lBdve/butBw8+f/T9D3ABa3AG8W3VPX4hBin+bj8dMMmSpp5pg7fJ6xrBFE2WQQEWnV8Qg3FbAWzYfM1rREEnmvkN2o1+acG2d/9u68GDzx91v3mAjb1zkpqT21OipPKO0b9TO5W0nTdOmAQm0TObts3aBKgwARtoPDiCT0gHgwnbArzxmtcLc08HgF1asN0C4Ms/fvD5I+7PhfqyXE/b7RbbrGyRQRT9ARZcwAUmgdoz0ehJ9Fn7QAhUjhDAQSw0bV3T3WbNa59jzmiP6GsWbGXDX2ytjy8+f9T97fiBPq9YeLdBmyuizZHaqXITnXiMUEEVcJ7K4j3BFPurtB4bixW8wTpweL8DC95szWMOqucFYGsWbGU7p3TxxxefP+r+oTVktxY0v5hbq3KiOKYnY8ddJVSBxuMMVffNbxwIOERShst73HZ78DZrHpmJmH3K6sGz0fe3UUj0eyRrSCGTTc+rjVNoGzNSv05srAxUBh8IhqChiQgVNIIBH3AVPnrsnXQZbLTm8ammv8eVXn/vWpaTem5IXRlt+U/LA21zhSb9cye6jcOfCnOwhIAYXAMVTUNV0QhVha9xjgA27ODJbLbmitt3tRN80lqG6N/khgot4ZVlOyO4WNg3OIMzhIZQpUEHieg2im6F91hB3I2tubql6BYNN9Hj5S7G0G2tahslBWKDnOiIvuAEDzakDQKDNFQT6gbn8E2y4BBubM230YIpBnDbMa+y3dx0n1S0BtuG62lCCXwcY0F72T1VRR3t2ONcsmDjbmzNt9RFs2LO2hQNyb022JisaI8rAWuw4HI3FuAIhZdOGIcdjLJvvObqlpqvWTJnnQbyi/1M9O8UxWhBs//H42I0q1Yb/XPGONzcmm+ri172mHKvZBpHkJaNJz6v9jxqiklDj3U4CA2ugpAaYMWqNXsdXbmJNd9egCnJEsphXNM+MnK3m0FCJ5S1kmJpa3DgPVbnQnPGWIDspW9ozbcO4K/9LkfaQO2KHuqlfFXSbdNzcEcwoqNEFE9zcIXu9/6n/ym/BC/C3aJLzEKPuYVlbFnfhZ8kcWxV3dbv4bKl28566wD+8C53aw49lTABp9PWbsB+knfc/Li3eVizf5vv/xmvnPKg5ihwKEwlrcHqucuVcVOxEv8aH37E3ZqpZypUulrHEtIWKUr+txHg+ojZDGlwnqmkGlzcVi1dLiNSJiHjfbRNOPwKpx9TVdTn3K05DBx4psIk4Ei8aCkJahRgffk4YnEXe07T4H2RR1u27E6wfQsBDofUgjFUFnwC2AiVtA+05J2zpiDK2Oa0c5fmAecN1iJzmpqFZxqYBCYhFTCsUNEmUnIcZ6aEA5rQVhEywG6w7HSW02XfOoBlQmjwulOFQAg66SvJblrTEX1YtJ3uG15T/BH1OfOQeuR8g/c0gdpT5fx2SKbs9EfHTKd';
}
/**
* Returns the search texture as a Base64 string.
*
* @private
* @return {string} The search texture.
*/
_getSearchTexture() {
return 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEIAAAAhCAAAAABIXyLAAAAAOElEQVRIx2NgGAWjYBSMglEwEICREYRgFBZBqDCSLA2MGPUIVQETE9iNUAqLR5gIeoQKRgwXjwAAGn4AtaFeYLEAAAAASUVORK5CYII=';
}
}
export default SMAANode;
/**
* TSL function for creating a SMAA node for anti-aliasing via post processing.
*
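* A usage sketch (`postProcessing` and `scenePass` are assumed to exist; see
* the class documentation above):
* ```js
* postProcessing.outputNode = smaa( scenePass );
* ```
*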
* @tsl
* @function
* @param {Node<vec4>} node - The node that represents the input of the effect.
* @returns {SMAANode}
*/
export const smaa = ( node ) => nodeObject( new SMAANode( convertToTexture( node ) ) );