webgl_multiple_rendertargets
对应 three.js 示例地址:https://threejs.org/examples/#webgl_multiple_rendertargets 。
仅需关注 init 函数的内容,其他部分都是示例小程序所使用的描述配置。
js
import * as THREE from "three";
import { OrbitControls } from "three/examples/jsm/controls/OrbitControls.js";
/** @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo} */
// Example descriptor: metadata consumed by the mini-program host plus the
// actual three.js demo in `init`. Mirrors the official example
// "webgl_multiple_rendertargets" (G-buffer rendering via multiple render targets).
const exampleInfo = {
name: "webgl_multiple_rendertargets",
useLoaders: [],
info: [
[
{
tag: "a",
link: "https://threejs.org",
content: "threejs"
},
{
tag: "text",
content: "webgl - Multiple RenderTargets"
}
]
],
// Entry point. Renders on demand (GUI change / controls change / texture load /
// resize) — no animation loop, which is why `useFrame` and `Stats` go unused here.
init: ({ window, canvas, GUI, Stats, needToDispose, useFrame }) => {
let camera, scene, renderer, controls;
let renderTarget;
let postScene, postCamera;
// GUI-tunable state; applied to the render target / materials on every render().
const parameters = {
samples: 4,
wireframe: false
};
const gui = new GUI();
gui.add(parameters, "samples", 0, 4).step(1);
gui.add(parameters, "wireframe");
// Any GUI change triggers a re-render (render is hoisted, so this is safe here).
gui.onChange(render);
init();
function init() {
renderer = new THREE.WebGLRenderer({ canvas });
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
// MRT target sized in device pixels: `count: 2` allocates two color
// attachments (written by the G-buffer shader's gColor/gNormal outputs).
renderTarget = new THREE.WebGLRenderTarget(
window.innerWidth * window.devicePixelRatio,
window.innerHeight * window.devicePixelRatio,
{
count: 2,
minFilter: THREE.NearestFilter,
magFilter: THREE.NearestFilter
}
);
renderTarget.textures[0].name = "diffuse";
renderTarget.textures[1].name = "normal";
// First pass scene: a textured torus knot rendered into the G-buffer.
scene = new THREE.Scene();
// 2236962 === 0x222222 (dark grey background).
scene.background = new THREE.Color(2236962);
camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 0.1, 50);
camera.position.z = 4;
const loader = new THREE.TextureLoader();
// Re-render once the texture arrives (async load).
const diffuse = loader.load("textures/hardwood2_diffuse.jpg", render);
diffuse.wrapS = THREE.RepeatWrapping;
diffuse.wrapT = THREE.RepeatWrapping;
diffuse.colorSpace = THREE.SRGBColorSpace;
// GLSL3 raw shader writing to both attachments via layout(location = N).
scene.add(
new THREE.Mesh(
new THREE.TorusKnotGeometry(1, 0.3, 128, 32),
new THREE.RawShaderMaterial({
name: "G-Buffer Shader",
vertexShader: `
in vec3 position;
in vec3 normal;
in vec2 uv;
out vec3 vNormal;
out vec2 vUv;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
uniform mat3 normalMatrix;
void main() {
vUv = uv;
// get smooth normals
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
vec3 transformedNormal = normalMatrix * normal;
vNormal = normalize( transformedNormal );
gl_Position = projectionMatrix * mvPosition;
}
`,
fragmentShader: `
precision highp float;
precision highp int;
layout(location = 0) out vec4 gColor;
layout(location = 1) out vec4 gNormal;
uniform sampler2D tDiffuse;
uniform vec2 repeat;
in vec3 vNormal;
in vec2 vUv;
void main() {
// write color to G-Buffer
gColor = texture( tDiffuse, vUv * repeat );
// write normals to G-Buffer
gNormal = vec4( normalize( vNormal ), 0.0 );
}
`,
uniforms: {
tDiffuse: { value: diffuse },
repeat: { value: new THREE.Vector2(5, 0.5) }
},
glslVersion: THREE.GLSL3
})
)
);
// Second pass: full-screen quad compositing the two G-buffer attachments
// (diffuse on the left half, normals on the right — split by vUv.x).
postScene = new THREE.Scene();
postCamera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0, 1);
postScene.add(
new THREE.Mesh(
new THREE.PlaneGeometry(2, 2),
new THREE.RawShaderMaterial({
name: "Post-FX Shader",
vertexShader: `
in vec3 position;
in vec2 uv;
out vec2 vUv;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
`,
fragmentShader: `
precision highp float;
precision highp int;
vec4 LinearTosRGB( in vec4 value ) {
return vec4( mix( pow( value.rgb, vec3( 0.41666 ) ) * 1.055 - vec3( 0.055 ), value.rgb * 12.92, vec3( lessThanEqual( value.rgb, vec3( 0.0031308 ) ) ) ), value.a );
}
layout(location = 0) out vec4 pc_FragColor;
in vec2 vUv;
uniform sampler2D tDiffuse;
uniform sampler2D tNormal;
void main() {
vec4 diffuse = texture( tDiffuse, vUv );
vec4 normal = texture( tNormal, vUv );
pc_FragColor = mix( diffuse, normal, step( 0.5, vUv.x ) );
pc_FragColor.a = 1.0;
pc_FragColor = LinearTosRGB( pc_FragColor );
}
`,
uniforms: {
// Sample the two attachments written by the first pass.
tDiffuse: { value: renderTarget.textures[0] },
tNormal: { value: renderTarget.textures[1] }
},
glslVersion: THREE.GLSL3
})
)
);
controls = new OrbitControls(camera, renderer.domElement);
controls.addEventListener("change", render);
// NOTE(review): the upstream three.js example attaches this listener to
// `window`; here it is attached to `canvas`. The host declares `window` as an
// EventTarget that is "effectively the same as canvas", so this may be
// intentional — confirm resize events are actually dispatched on `canvas`.
canvas.addEventListener("resize", onWindowResize);
needToDispose(renderer, scene, controls);
}
// Keeps camera aspect, drawing-buffer size, and the MRT target size in sync.
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
const dpr = renderer.getPixelRatio();
renderTarget.setSize(window.innerWidth * dpr, window.innerHeight * dpr);
render();
}
// Two-pass render: scene -> MRT G-buffer, then composite quad -> screen.
function render() {
// Apply current GUI settings before drawing.
renderTarget.samples = parameters.samples;
scene.traverse(function(child) {
if (child.material !== void 0) {
child.material.wireframe = parameters.wireframe;
}
});
renderer.setRenderTarget(renderTarget);
renderer.render(scene, camera);
renderer.setRenderTarget(null);
renderer.render(postScene, postCamera);
}
}
};
// Default export of the example descriptor for the mini-program host.
export default exampleInfo;
ts
import { Loader, TypedArray } from 'three';
/**
 * Multi-platform wrapper descriptor for an official three.js example.
 * */
export interface OfficialExampleInfo extends MiniProgramMeta {
/** Example name (kept identical to the official site). */
name: string;
/** Entry point of the example ("main"). */
init: (context: LoadContext) => void;
}
/** Runtime context injected into each example's `init` function by the host. */
export interface LoadContext {
// Declared to minimize changes to the official example code; per the original
// author's note, this object is effectively the same thing as `canvas`.
window: EventTarget & { innerWidth: number; innerHeight: number; devicePixelRatio: number };
/** HTMLCanvasElement */
canvas: any;
/** https://www.npmjs.com/package/lil-gui */
GUI: any;
/**
 * https://www.npmjs.com/package/stats.js
 * Other supported versions may also be used.
 * */
Stats: any;
/** Collects objects that need disposing (the official examples skip this). */
// NOTE(review): `void | ((fromFn: () => any[]) => void)` unions the *return*
// value; this looks like it was meant to be two callable signatures
// (variadic objects OR a provider function) — confirm against call sites.
needToDispose: (...objs: any[]) => void | ((fromFn: () => any[]) => void);
/** Generic wrapper built on requestAnimationFrame. */
useFrame(animateFunc: (/** ms */ delta: number) => void): { cancel: () => void };
/** Show the loading modal. */
requestLoading(text?: string): Promise<void>;
/** Hide the loading modal. */
cancelLoading(): void;
/** Generic wrapper for saving a file. */
saveFile(
fileName: string,
data: ArrayBuffer | TypedArray | DataView | string
): Promise<string | null>;
/** Resource paths used when an example needs the DracoDecoder. */
DecoderPath: {
GLTF: string;
STANDARD: string;
};
/** Prefix a resource path with the CDN base URL. */
withCDNPrefix(path: string): string;
/**
 * In mini programs use: import { VideoTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/VideoTexture.js';
 * In a normal (web) environment THREE.VideoTexture can be used directly.
 * */
getVideoTexture(videoOptions: VideoOptions): Promise<[{ isVideoTexture: true }, video: any]>;
/**
 * In mini programs use: import { CameraTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/CameraTexture.js';
 * In a normal (web) environment see the example webgl_materials_video_webcam.
 * */
getCameraTexture(): { isVideoTexture: true };
/** Dynamically update a placeholder inside `info`. */
bindInfoText(template: `$${string}$`, initValue?: string): { value: string };
/** Event callback for the split-screen slider control. */
onSlideStart(handle: () => void): void;
/** Event callback for the split-screen slider control. */
onSlideEnd(handle: () => void): void;
/** Event callback for the split-screen slider control. */
onSlideChange(handle: (offset: number, boxSize: number) => void): void;
}
/** Options for `LoadContext.getVideoTexture`. */
export type VideoOptions = {
src: string;
/** Equivalent to HTMLVideoElement.naturalWidth (unavailable in mini programs). */
width: number;
/** Equivalent to HTMLVideoElement.naturalHeight (unavailable in mini programs). */
height: number;
loop?: boolean;
autoplay?: boolean;
muted?: boolean;
};
/** Configuration used by the example mini program. */
export interface MiniProgramMeta {
/** Loaders used, for tracking loading-related statistics. */
useLoaders: Loader[];
/** Generic info entries (rendered links/text). */
info: TagItem[][];
/** Special info panel entries. */
infoPanel?: {
left?: [string, string][];
right?: [string, string][];
};
/** Split-screen slider control configuration. */
needSlider?: {
/** Slider direction. */
direction?: 'horizontal' | 'vertical';
/** Initial offset, 0-100. */
initPosition?: number;
};
/** Whether the joystick control is needed. */
needArrowControls?: boolean;
/** Canvas context type required by default. */
canvasType?: '2d' | 'webgl' | 'webgl2';
/** Canvas styles required to keep the rendered result consistent. */
canvasStyle?: {
bgColor?: string;
width?: number | string;
height?: number | string;
};
/** Some examples need a confirmation prompt before loading. */
initAfterConfirm?: {
/**
 * Prompt type.
 * @default 'default'
 * */
type?: 'warning' | 'default';
text: string[];
};
}
/** Common shape of an `info` entry, discriminated by `tag`. */
export interface BaseTag<T extends string> {
tag: T;
content: string;
}
/** Hyperlink entry. */
export interface ATag extends BaseTag<'a'> {
link: string;
}
/** Plain-text entry. */
export type TextTag = BaseTag<'text'>;
/** One item in the `info` lists. */
export type TagItem = TextTag | ATag;