webgl_depth_texture
This corresponds to the official three.js example of the same name.
Only the contents of the init function need attention; everything else is the descriptive configuration used by the example mini program.
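For orientation, every example module follows the same basic shape. Here is a minimal sketch of that shape; the field values below are illustrative placeholders, not part of this example:

```js
/** @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo} */
const exampleInfo = {
  name: "example_name", // must match the official example name
  useLoaders: [],       // loaders used, collected for loading statistics
  info: [],             // description tags rendered by the mini program
  init: (context) => {
    // all of the actual three.js code lives here
  }
};
export { exampleInfo as default };
```

The complete example follows.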
```js
import * as THREE from "three";
import { OrbitControls } from "three/examples/jsm/controls/OrbitControls.js";
/** @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo} */
const exampleInfo = {
name: "webgl_depth_texture",
useLoaders: [],
info: [
[
{
tag: "a",
link: "https://threejs.org",
content: "threejs"
},
{
tag: "text",
content: "webgl - depth texture"
}
],
[
{
tag: "text",
content: "Stores render target depth in a texture attachment."
}
],
[
{
tag: "text",
content: "Created by"
},
{
tag: "a",
link: "http://twitter.com/mattdesl",
content: "@mattdesl"
},
{
tag: "text",
content: "."
}
]
],
init: ({ window, canvas, GUI, Stats, needToDispose, useFrame }) => {
let camera, scene, renderer, controls, stats;
let target;
let postScene, postCamera, postMaterial;
const params = {
format: THREE.DepthFormat,
type: THREE.UnsignedShortType,
samples: 0
};
const formats = {
DepthFormat: THREE.DepthFormat,
DepthStencilFormat: THREE.DepthStencilFormat
};
const types = {
UnsignedShortType: THREE.UnsignedShortType,
UnsignedIntType: THREE.UnsignedIntType,
FloatType: THREE.FloatType
};
init();
function init() {
renderer = new THREE.WebGLRenderer({ canvas });
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.setAnimationLoop(animate);
stats = new Stats(renderer);
camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 0.01, 50);
camera.position.z = 4;
controls = new OrbitControls(camera, renderer.domElement);
controls.enableDamping = true;
setupRenderTarget();
setupScene();
setupPost();
onWindowResize();
window.addEventListener("resize", onWindowResize);
const gui = new GUI({ width: 300 });
gui.add(params, "format", formats).onChange(setupRenderTarget);
gui.add(params, "type", types).onChange(setupRenderTarget);
gui.add(params, "samples", 0, 16, 1).onChange(setupRenderTarget);
gui.open();
needToDispose(renderer, scene);
}
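// (Re)creates the render target with a DepthTexture attachment; runs once at startup and again whenever a GUI parameter changes.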
function setupRenderTarget() {
if (target) target.dispose();
const format = parseInt(params.format);
const type = parseInt(params.type);
const samples = parseInt(params.samples);
const dpr = renderer.getPixelRatio();
target = new THREE.WebGLRenderTarget(window.innerWidth * dpr, window.innerHeight * dpr);
target.texture.minFilter = THREE.NearestFilter;
target.texture.magFilter = THREE.NearestFilter;
target.stencilBuffer = format === THREE.DepthStencilFormat ? true : false;
target.samples = samples;
target.depthTexture = new THREE.DepthTexture();
target.depthTexture.format = format;
target.depthTexture.type = type;
}
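// Full-screen post pass: an orthographic camera and a quad whose shader visualizes the depth texture.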
function setupPost() {
postCamera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0, 1);
postMaterial = new THREE.ShaderMaterial({
vertexShader: `
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
`,
fragmentShader: `
#include <packing>
varying vec2 vUv;
uniform sampler2D tDiffuse;
uniform sampler2D tDepth;
uniform float cameraNear;
uniform float cameraFar;
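// Convert the non-linear depth-buffer value into a linear [0, 1] depth for display.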
float readDepth( sampler2D depthSampler, vec2 coord ) {
float fragCoordZ = texture2D( depthSampler, coord ).x;
float viewZ = perspectiveDepthToViewZ( fragCoordZ, cameraNear, cameraFar );
return viewZToOrthographicDepth( viewZ, cameraNear, cameraFar );
}
void main() {
//vec3 diffuse = texture2D( tDiffuse, vUv ).rgb;
float depth = readDepth( tDepth, vUv );
gl_FragColor.rgb = 1.0 - vec3( depth );
gl_FragColor.a = 1.0;
}
`,
uniforms: {
cameraNear: { value: camera.near },
cameraFar: { value: camera.far },
tDiffuse: { value: null },
tDepth: { value: null }
}
});
const postPlane = new THREE.PlaneGeometry(2, 2);
const postQuad = new THREE.Mesh(postPlane, postMaterial);
postScene = new THREE.Scene();
postScene.add(postQuad);
needToDispose(postScene);
}
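// Scene content: randomly placed torus knots sharing a single geometry and material.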
function setupScene() {
scene = new THREE.Scene();
const geometry = new THREE.TorusKnotGeometry(1, 0.3, 128, 64);
const material = new THREE.MeshBasicMaterial({ color: "blue" });
const count = 50;
const scale = 5;
for (let i = 0; i < count; i++) {
const r = Math.random() * 2 * Math.PI;
const z = Math.random() * 2 - 1;
const zScale = Math.sqrt(1 - z * z) * scale;
const mesh = new THREE.Mesh(geometry, material);
mesh.position.set(Math.cos(r) * zScale, Math.sin(r) * zScale, z * scale);
mesh.rotation.set(Math.random(), Math.random(), Math.random());
scene.add(mesh);
}
}
function onWindowResize() {
const aspect = window.innerWidth / window.innerHeight;
camera.aspect = aspect;
camera.updateProjectionMatrix();
const dpr = renderer.getPixelRatio();
target.setSize(window.innerWidth * dpr, window.innerHeight * dpr);
renderer.setSize(window.innerWidth, window.innerHeight);
}
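// Render the scene into the target first (filling the depth texture), then render the post quad that samples it to the screen.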
function animate() {
renderer.setRenderTarget(target);
renderer.render(scene, camera);
postMaterial.uniforms.tDiffuse.value = target.texture;
postMaterial.uniforms.tDepth.value = target.depthTexture;
renderer.setRenderTarget(null);
renderer.render(postScene, postCamera);
controls.update();
stats.update();
}
}
};
export {
exampleInfo as default
};
```

```ts
import { Loader, TypedArray } from 'three';
/**
* Multi-platform wrapper of the official three.js examples
* */
export interface OfficialExampleInfo extends MiniProgramMeta {
/** Example name (kept consistent with the official site) */
name: string;
/** main */
init: (context: LoadContext) => void;
}
export interface LoadContext {
// To minimize changes to the official code; effectively equivalent to canvas
window: EventTarget & { innerWidth: number; innerHeight: number; devicePixelRatio: number };
/** HTMLCanvasElement */
canvas: any;
/** https://www.npmjs.com/package/lil-gui */
GUI: any;
/**
* https://www.npmjs.com/package/stats.js
* Other supported versions may also be used
* */
Stats: any;
/** Collects objects that need to be disposed (the official examples do not handle this) */
needToDispose: (...objs: any[]) => void | ((fromFn: () => any[]) => void);
/** Generic wrapper based on requestAnimationFrame */
useFrame(animateFunc: (/** ms */ delta: number) => void): { cancel: () => void };
/** Shows the loading modal */
requestLoading(text?: string): Promise<void>;
/** Hides the loading modal */
cancelLoading(): void;
/** Generic wrapper for saving a file */
saveFile(
fileName: string,
data: ArrayBuffer | TypedArray | DataView | string
): Promise<string | null>;
/** Resource paths used when an example needs the DRACO decoder */
DecoderPath: {
GLTF: string;
STANDARD: string;
};
/** Prepends the CDN prefix to a resource path */
withCDNPrefix(path: string): string;
/**
* In the mini program, use import { VideoTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/VideoTexture.js';
* in a normal (web) environment, THREE.VideoTexture can be used directly.
* */
getVideoTexture(videoOptions: VideoOptions): Promise<[{ isVideoTexture: true }, video: any]>;
/**
* In the mini program, use import { CameraTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/CameraTexture.js';
* in a normal (web) environment, see the example webgl_materials_video_webcam.
* */
getCameraTexture(): { isVideoTexture: true };
/** Used to dynamically update placeholders in info */
bindInfoText(template: `$${string}$`, initValue?: string): { value: string };
/** Event callback for the split-screen slider control */
onSlideStart(handle: () => void): void;
/** Event callback for the split-screen slider control */
onSlideEnd(handle: () => void): void;
/** Event callback for the split-screen slider control */
onSlideChange(handle: (offset: number, boxSize: number) => void): void;
}
export type VideoOptions = {
src: string;
/** Equivalent to the intrinsic width of the video (HTMLVideoElement.videoWidth), which cannot be read in the mini program */
width: number;
/** Equivalent to the intrinsic height of the video (HTMLVideoElement.videoHeight), which cannot be read in the mini program */
height: number;
loop?: boolean;
autoplay?: boolean;
muted?: boolean;
};
/** Configuration used by the example mini program */
export interface MiniProgramMeta {
/** Used to track loading-related information */
useLoaders: Loader[];
/** General info */
info: TagItem[][];
/** Special info */
infoPanel?: {
left?: [string, string][];
right?: [string, string][];
};
/** Split-screen slider control configuration */
needSlider?: {
/** Direction */
direction?: 'horizontal' | 'vertical';
/** Initial offset, 0-100 */
initPosition?: number;
};
/** Joystick (arrow) control */
needArrowControls?: boolean;
/** Canvas type required by default */
canvasType?: '2d' | 'webgl' | 'webgl2';
/** Canvas style required to keep the rendering consistent */
canvasStyle?: {
bgColor?: string;
width?: number | string;
height?: number | string;
};
/** Some examples need to show a prompt before loading */
initAfterConfirm?: {
/**
* Prompt type
* @default 'default'
* */
type?: 'warning' | 'default';
text: string[];
};
}
export interface BaseTag<T extends string> {
tag: T;
content: string;
}
export interface ATag extends BaseTag<'a'> {
link: string;
}
export type TextTag = BaseTag<'text'>;
export type TagItem = TextTag | ATag;
```
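
To illustrate how an init function typically consumes this context, here is a minimal sketch that relies only on the helpers declared above; the example name and the scene contents are arbitrary placeholders:

```js
import * as THREE from "three";

/** @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo} */
const exampleInfo = {
  name: "usage_sketch", // placeholder name, not an official example
  useLoaders: [],
  info: [[{ tag: "text", content: "context usage sketch" }]],
  init: ({ window, canvas, needToDispose, useFrame }) => {
    const renderer = new THREE.WebGLRenderer({ canvas });
    renderer.setPixelRatio(window.devicePixelRatio);
    renderer.setSize(window.innerWidth, window.innerHeight);
    const camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 0.1, 100);
    camera.position.z = 3;
    const scene = new THREE.Scene();
    scene.add(new THREE.Mesh(new THREE.BoxGeometry(), new THREE.MeshNormalMaterial()));
    // useFrame is the rAF-based loop provided by the host; it returns { cancel } to stop the loop.
    useFrame(() => {
      renderer.render(scene, camera);
    });
    // Register resources so the host can dispose of them when the example is closed.
    needToDispose(renderer, scene);
  }
};
export { exampleInfo as default };
```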