
three.js scan-light effect via post-processing

归星驰
2023-12-01

Add a depth texture

import * as THREE from "three";
import { EffectComposer } from "three/examples/jsm/postprocessing/EffectComposer.js";
import { ShaderPass } from "three/examples/jsm/postprocessing/ShaderPass.js";

// Size the depth texture to the actual drawing buffer (this accounts for devicePixelRatio).
const { width, height } = helper.renderer.getDrawingBufferSize(new THREE.Vector2());

const effectComposer = new EffectComposer(helper.renderer);

// Attach a depth texture to the composer's read buffer so later passes can sample scene depth.
const depthTexture = new THREE.DepthTexture(width, height);
effectComposer.readBuffer.depthBuffer = true;
effectComposer.readBuffer.depthTexture = depthTexture;

Custom pass

const shaderPass = new ShaderPass(
    new THREE.ShaderMaterial({
        uniforms: {
            time: { value: 0 },
            // Result of the previous pass; ShaderPass writes it into tDiffuse by default.
            tDiffuse: { value: null },
            // Scene depth from the composer's read buffer.
            depthTexture: { value: depthTexture },
            // Texture swept across the ground by the scan ring.
            scanTexture: {
                value: new THREE.TextureLoader().load(
                    "/textures/Carbon.png",
                    (t) => {
                        t.repeat.set(10, 10);
                        t.wrapS = THREE.RepeatWrapping;
                        t.wrapT = THREE.RepeatWrapping;
                    }
                ),
            },
            // Matrices used to reconstruct world positions from depth.
            uProjectionInverse: {
                value: helper.camera.projectionMatrixInverse,
            },
            uMatrixWorld: { value: helper.camera.matrixWorld },
            cameraNear: { value: helper.camera.near },
            cameraFar: { value: helper.camera.far },
        },
        vertexShader,
        fragmentShader,
    })
);
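For completeness, a sketch of how the pass could be wired into the composer and driven each frame (helper.scene and the render loop below are assumptions based on the helper object used above; the time uniform has to advance or the ring will not move). A RenderPass must run first so the read buffer and its depth texture actually contain the scene:

// Add alongside the imports above.
import { RenderPass } from "three/examples/jsm/postprocessing/RenderPass.js";

effectComposer.addPass(new RenderPass(helper.scene, helper.camera));
effectComposer.addPass(shaderPass);

const clock = new THREE.Clock();
function animate() {
    requestAnimationFrame(animate);
    // Drive the expanding ring; the fragment shader computes fract((dis - time) / 4.)
    shaderPass.uniforms.time.value += clock.getDelta();
    effectComposer.render();
}
animate();

Because the uniforms hold references to the camera's Matrix4 objects, uProjectionInverse and uMatrixWorld stay current as the camera moves and do not need to be reassigned every frame.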

Core code for recovering the world position

vec3 WorldPosFromDepth(float depth) {
    // Depth buffer value [0, 1] -> NDC z [-1, 1]
    float z = (depth - 0.5) * 2.;

    vec4 clipSpacePosition = vec4(vPosition.xy, z, 1.0);
    vec4 viewSpacePosition = uProjectionInverse * clipSpacePosition;

    // Perspective divide
    viewSpacePosition /= viewSpacePosition.w;

    vec4 worldSpacePosition = uMatrixWorld * viewSpacePosition;

    return worldSpacePosition.xyz;
}

    // Result of the previous pass; when no textureID is given, ShaderPass uses tDiffuse.
    vec4 diffuse = texture2D(tDiffuse, vUv);
    // Sample the depth texture; the depth value is what lets us locate the point.
    float depth = texture2D(depthTexture, vUv).x;
    // float depth = 0.;
    // Normalized device coordinates (NDC)
    vec4 ndc = vec4(vPosition.x, vPosition.y, ((depth - 0.5) * 2.), 1.);
    // Invert the MVP (ModelViewProjection) transform from the screen position and depth
    // to recover the actually rendered world position.
    vec4 realPosition = uMatrixWorld * uProjectionInverse * ndc;
    // Because a perspective camera's view volume is a frustum, the result of the matrix
    // multiplication is not in the same projective space (i.e. the w component is not 1
    // for every vertex). To finish the conversion, divide every component by w itself.
    // The GPU does this divide automatically during normal rendering; when reconstructing
    // the position ourselves we have to do it manually.
    realPosition /= realPosition.w;
    // Use the horizontal (xz) distance to decide whether the point lies in the scan area.
    // For a spherical scan volume use the full xyz distance instead, but objects taller
    // than the sphere's radius would then never be hit.
    float dis = distance(realPosition.xz, vec2(0, 0));
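Condensed, the whole reconstruction is three lines of GLSL (a restatement of the snippet above, using the same uniform names):

vec4 ndc = vec4(vPosition.xy, depth * 2. - 1., 1.);
vec4 unprojected = uMatrixWorld * uProjectionInverse * ndc;
vec3 worldPos = unprojected.xyz / unprojected.w;

The snippet below reaches the same world position by a different route, going through view space with helpers from three.js's <packing> shader chunk; the virtualCamera* uniforms it references are supplied from the JavaScript side (see the sketch after that snippet).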


#include <packing>

float getViewZ(const in float depth) {
    return perspectiveDepthToViewZ(depth, virtualCameraNear, virtualCameraFar);
}

vec3 getViewPosition(const in vec2 uv, const in float depth/* [0, 1] depth buffer value */, const in float clipW) {
    vec4 clipPosition = vec4((vec3(uv, depth) - 0.5) * 2.0, 1.0);//ndc
    clipPosition *= clipW; //clip
    return (virtualCameraProjectionMatrixInverse * clipPosition).xyz;//view
}

// Inside main():
    vec4 depthT = texture2D(depthTexture, vUv);
    float depth = depthT.r;
    float viewZ = getViewZ(depth);
    float clipW = virtualCameraProjectionMatrix[2][3] * viewZ + virtualCameraProjectionMatrix[3][3];
    vec3 viewPosition = getViewPosition(vUv, depth, clipW);
    vec3 worldPosition = (virtualCameraMatrixWorld * vec4(viewPosition, 1)).xyz;
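The post does not show the JavaScript side of this variant. A minimal sketch of the uniforms it expects, reusing the same helper.camera as above (the uniform names come from the GLSL; the wiring itself is an assumption):

// Hypothetical uniform wiring for the packing-based variant; names mirror the GLSL above.
const packingUniforms = {
    depthTexture: { value: depthTexture },
    virtualCameraNear: { value: helper.camera.near },
    virtualCameraFar: { value: helper.camera.far },
    virtualCameraProjectionMatrix: { value: helper.camera.projectionMatrix },
    virtualCameraProjectionMatrixInverse: { value: helper.camera.projectionMatrixInverse },
    virtualCameraMatrixWorld: { value: helper.camera.matrixWorld },
};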

Fragment shader

varying vec2 vUv;
varying vec3 vPosition;
uniform sampler2D tDiffuse;
uniform sampler2D depthTexture;
uniform sampler2D scanTexture;

uniform mat4 uProjectionInverse;
uniform mat4 uMatrixWorld;

uniform float time;

vec3 WorldPosFromDepth(float depth) {
    float z = (depth - 0.5) * 2.;

    vec4 clipSpacePosition = vec4(vPosition.xy, z, 1.0);
    vec4 viewSpacePosition = uProjectionInverse * clipSpacePosition;

    viewSpacePosition /= viewSpacePosition.w;

    vec4 worldSpacePosition = uMatrixWorld * viewSpacePosition;

    return worldSpacePosition.xyz;
}

void main() {
    // Previous pass result
    vec4 base = texture2D(tDiffuse, vUv);
    // Reconstruct this fragment's world position from the depth buffer
    float depth = texture2D(depthTexture, vUv).r;
    vec3 pos = WorldPosFromDepth(depth);
    // Horizontal distance from the scan center (the world origin here)
    float dis = distance(pos.xz, vec2(0, 0));
    vec3 color = vec3(base);

    // Only fragments within a 15-unit radius can be scanned
    if(dis < 15.) {

        vec3 scanT = texture2D(scanTexture, pos.xz).rgb;

        // Expanding ring: repeats every 4 world units and moves outward as time grows
        float wave = fract((dis - time) / 4.);

        // Only the trailing 30% of each ring is lit, fading out toward the edge of the radius
        if(wave > 0.7 && wave < 1.) {
            float p = (wave - 0.7) / 0.3;
            color = mix(color, scanT + 0.1, p * (1. - (dis / 15.)));
        }
    // if(dis > innerCircle && dis < outerCircle) {
    //     float p = (dis - innerCircle) / (outerCircle - innerCircle);
    //     color = mix(color, scanT, p);
    // }
    }

    gl_FragColor = vec4(color, 1.);
}
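The post never shows the vertex shader that supplies vUv and vPosition. A minimal sketch of what it presumably looks like for the full-screen quad that ShaderPass renders (the vPosition varying is an assumption here; its xy must span [-1, 1] so the fragment shader can treat it as NDC):

varying vec2 vUv;
varying vec3 vPosition;

void main() {
    vUv = uv;
    // The full-screen quad's vertex positions already span [-1, 1] in x/y,
    // which is why the fragment shader can use vPosition.xy directly as NDC coordinates.
    vPosition = position;
    gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}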
