webgl_postprocessing_dof
Corresponds to the official three.js example page of the same name.
Only the contents of the init function need attention; everything else is descriptive configuration used by the example mini program.
```js
import * as THREE from "three";
import { EffectComposer } from "three/examples/jsm/postprocessing/EffectComposer.js";
import { RenderPass } from "three/examples/jsm/postprocessing/RenderPass.js";
import { BokehPass } from "three/examples/jsm/postprocessing/BokehPass.js";
import { OutputPass } from "three/examples/jsm/postprocessing/OutputPass.js";
/** @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo} */
const exampleInfo = {
name: "webgl_postprocessing_dof",
useLoaders: [],
info: [
[
{
tag: "a",
link: "https://threejs.org",
content: "three.js"
},
{
tag: "text",
content: "- webgl depth-of-field with bokeh example"
}
],
[
{
tag: "text",
content: "shader by"
},
{
tag: "a",
link: "http://artmartinsh.blogspot.com/2010/02/glsl-lens-blur-filter-with-bokeh.html",
content: "Martins Upitis"
}
]
],
init: ({ window, canvas, GUI, Stats, needToDispose, useFrame }) => {
let camera, scene, renderer, stats, singleMaterial, zmaterial, parameters, nobjects, cubeMaterial;
let mouseX = 0, mouseY = 0;
let windowHalfX = window.innerWidth / 2;
let windowHalfY = window.innerHeight / 2;
let width = window.innerWidth;
let height = window.innerHeight;
const materials = [];
const postprocessing = {};
init();
    function init() {
      camera = new THREE.PerspectiveCamera(70, width / height, 1, 3e3);
      camera.position.z = 200;
      scene = new THREE.Scene();
      renderer = new THREE.WebGLRenderer({ canvas });
      renderer.setPixelRatio(window.devicePixelRatio);
      renderer.setSize(width, height);
      renderer.setAnimationLoop(animate);
      // Environment cube map shared by all sphere materials.
      const path = "textures/cube/SwedishRoyalCastle/";
      const format = ".jpg";
      const urls = [
        path + "px" + format,
        path + "nx" + format,
        path + "py" + format,
        path + "ny" + format,
        path + "pz" + format,
        path + "nz" + format
      ];
      const textureCube = new THREE.CubeTextureLoader().load(urls);
      parameters = { color: 0xff4900, envMap: textureCube };
      cubeMaterial = new THREE.MeshBasicMaterial(parameters);
      singleMaterial = false;
      if (singleMaterial) zmaterial = [cubeMaterial];
      // Fill a 14 x 9 x 14 grid with spheres; each sphere gets its own material
      // so its hue can be animated independently in render().
      const geo = new THREE.SphereGeometry(1, 20, 10);
      const xgrid = 14, ygrid = 9, zgrid = 14;
      nobjects = xgrid * ygrid * zgrid;
      const s = 60;
      let count = 0;
      for (let i = 0; i < xgrid; i++) {
        for (let j = 0; j < ygrid; j++) {
          for (let k = 0; k < zgrid; k++) {
            let mesh;
            if (singleMaterial) {
              mesh = new THREE.Mesh(geo, zmaterial);
            } else {
              mesh = new THREE.Mesh(geo, new THREE.MeshBasicMaterial(parameters));
              materials[count] = mesh.material;
            }
            const x = 200 * (i - xgrid / 2);
            const y = 200 * (j - ygrid / 2);
            const z = 200 * (k - zgrid / 2);
            mesh.position.set(x, y, z);
            mesh.scale.set(s, s, s);
            mesh.matrixAutoUpdate = false;
            mesh.updateMatrix();
            scene.add(mesh);
            count++;
          }
        }
      }
      initPostprocessing();
      renderer.autoClear = false;
      stats = new Stats(renderer);
      canvas.addEventListener("pointermove", onPointerMove);
      window.addEventListener("resize", onWindowResize);
      // GUI-adjustable depth-of-field parameters, pushed into the BokehPass uniforms.
      const effectController = {
        focus: 500,
        aperture: 5,
        maxblur: 0.01
      };
      const matChanger = function() {
        postprocessing.bokeh.uniforms["focus"].value = effectController.focus;
        postprocessing.bokeh.uniforms["aperture"].value = effectController.aperture * 1e-5;
        postprocessing.bokeh.uniforms["maxblur"].value = effectController.maxblur;
      };
      const gui = new GUI();
      gui.add(effectController, "focus", 10, 3e3, 10).onChange(matChanger);
      gui.add(effectController, "aperture", 0, 10, 0.1).onChange(matChanger);
      gui.add(effectController, "maxblur", 0, 0.01, 1e-3).onChange(matChanger);
      gui.close();
      matChanger();
      needToDispose(renderer, scene);
    }
    function onPointerMove(event) {
      if (event.isPrimary === false) return;
      mouseX = event.clientX - windowHalfX;
      mouseY = event.clientY - windowHalfY;
    }
    function onWindowResize() {
      windowHalfX = window.innerWidth / 2;
      windowHalfY = window.innerHeight / 2;
      width = window.innerWidth;
      height = window.innerHeight;
      camera.aspect = width / height;
      camera.updateProjectionMatrix();
      renderer.setSize(width, height);
      postprocessing.composer.setSize(width, height);
    }
    function initPostprocessing() {
      // Render the scene, apply the bokeh depth-of-field pass, then convert to the output color space.
      const renderPass = new RenderPass(scene, camera);
      const bokehPass = new BokehPass(scene, camera, {
        focus: 1,
        aperture: 0.025,
        maxblur: 0.01
      });
      const outputPass = new OutputPass();
      const composer = new EffectComposer(renderer);
      composer.addPass(renderPass);
      composer.addPass(bokehPass);
      composer.addPass(outputPass);
      postprocessing.composer = composer;
      postprocessing.bokeh = bokehPass;
      needToDispose(composer);
    }
    function animate() {
      stats.begin();
      render();
      stats.end();
      stats.update();
    }
    function render() {
      const time = Date.now() * 5e-5;
      // Ease the camera toward the pointer and cycle the sphere hues over time.
      camera.position.x += (mouseX - camera.position.x) * 0.036;
      camera.position.y += (-mouseY - camera.position.y) * 0.036;
      camera.lookAt(scene.position);
      if (!singleMaterial) {
        for (let i = 0; i < nobjects; i++) {
          const h = 360 * (i / nobjects + time) % 360 / 360;
          materials[i].color.setHSL(h, 1, 0.5);
        }
      }
      postprocessing.composer.render(0.1);
    }
  }
};
export {
  exampleInfo as default
};
```

The type definitions below describe the mini-program wrapper (`OfficialExampleInfo` / `LoadContext`) that the example above is written against.

```ts
import { Loader, TypedArray } from 'three';
/**
 * Multi-platform wrapper for the official three.js examples.
 */
export interface OfficialExampleInfo extends MiniProgramMeta {
  /** Example name (kept identical to the official example) */
  name: string;
  /** main entry */
  init: (context: LoadContext) => void;
}
export interface LoadContext {
  // Kept to minimise changes to the official code; effectively the same object as `canvas`.
  window: EventTarget & { innerWidth: number; innerHeight: number; devicePixelRatio: number };
  /** HTMLCanvasElement */
  canvas: any;
  /** https://www.npmjs.com/package/lil-gui */
  GUI: any;
  /**
   * https://www.npmjs.com/package/stats.js
   * Any other supported version may be used as well.
   */
  Stats: any;
  /** Collects objects that need to be disposed (the official examples do not handle this) */
  needToDispose: (...objs: any[]) => void | ((fromFn: () => any[]) => void);
  /** Generic wrapper around requestAnimationFrame (rAF) */
  useFrame(animateFunc: (/** ms */ delta: number) => void): { cancel: () => void };
  /** Show the loading modal */
  requestLoading(text?: string): Promise<void>;
  /** Hide the loading modal */
  cancelLoading(): void;
  /** Generic helper for saving a file */
  saveFile(
    fileName: string,
    data: ArrayBuffer | TypedArray | DataView | string
  ): Promise<string | null>;
  /** Resource paths used when an example needs the DRACO decoder */
  DecoderPath: {
    GLTF: string;
    STANDARD: string;
  };
  /** Prepends the CDN prefix to a resource path */
  withCDNPrefix(path: string): string;
  /**
   * In the mini program, use `import { VideoTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/VideoTexture.js';`
   * On the web, THREE.VideoTexture can be used directly.
   */
  getVideoTexture(videoOptions: VideoOptions): Promise<[{ isVideoTexture: true }, video: any]>;
  /**
   * In the mini program, use `import { CameraTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/CameraTexture.js';`
   * On the web, see the example webgl_materials_video_webcam.
   */
  getCameraTexture(): { isVideoTexture: true };
  /** Dynamically updates a placeholder declared in `info` */
  bindInfoText(template: `$${string}$`, initValue?: string): { value: string };
  /** Event callback for the split-screen (slider) control */
  onSlideStart(handle: () => void): void;
  /** Event callback for the split-screen (slider) control */
  onSlideEnd(handle: () => void): void;
  /** Event callback for the split-screen (slider) control */
  onSlideChange(handle: (offset: number, boxSize: number) => void): void;
}
export type VideoOptions = {
  src: string;
  /** The video's intrinsic width (HTMLVideoElement.videoWidth; not readable in the mini program) */
  width: number;
  /** The video's intrinsic height (HTMLVideoElement.videoHeight; not readable in the mini program) */
  height: number;
  loop?: boolean;
  autoplay?: boolean;
  muted?: boolean;
};
/** Configuration used by the example mini program */
export interface MiniProgramMeta {
  /** Used to collect loading-related statistics */
  useLoaders: Loader[];
  /** General info */
  info: TagItem[][];
  /** Special info panel */
  infoPanel?: {
    left?: [string, string][];
    right?: [string, string][];
  };
  /** Split-screen (slider) control configuration */
  needSlider?: {
    /** Direction */
    direction?: 'horizontal' | 'vertical';
    /** Initial offset, 0-100 */
    initPosition?: number;
  };
  /** Virtual joystick control */
  needArrowControls?: boolean;
  /** Canvas type required by default */
  canvasType?: '2d' | 'webgl' | 'webgl2';
  /** Canvas style required to keep the rendered result consistent */
  canvasStyle?: {
    bgColor?: string;
    width?: number | string;
    height?: number | string;
  };
  /** Some examples need to show a prompt before loading */
  initAfterConfirm?: {
    /**
     * Prompt type
     * @default 'default'
     */
    type?: 'warning' | 'default';
    text: string[];
  };
}
export interface BaseTag<T extends string> {
  tag: T;
  content: string;
}
export interface ATag extends BaseTag<'a'> {
  link: string;
}
export type TextTag = BaseTag<'text'>;
export type TagItem = TextTag | ATag;
```
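
For orientation, here is a minimal, hypothetical example module written against `LoadContext`. It is only a sketch under this page's assumptions, not one of the official examples: the module name and scene contents are illustrative, and it drives its render loop with `useFrame` (instead of `renderer.setAnimationLoop`, as the DOF example above does) before handing its resources to `needToDispose`.

```js
import * as THREE from "three";

/** @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo} */
const exampleSketch = {
  // Hypothetical name; real entries match an official example name.
  name: "sketch_minimal_cube",
  useLoaders: [],
  info: [[{ tag: "text", content: "minimal LoadContext usage sketch" }]],
  init: ({ window, canvas, needToDispose, useFrame }) => {
    const renderer = new THREE.WebGLRenderer({ canvas });
    renderer.setPixelRatio(window.devicePixelRatio);
    renderer.setSize(window.innerWidth, window.innerHeight);

    const scene = new THREE.Scene();
    const camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 1, 100);
    camera.position.z = 5;

    const mesh = new THREE.Mesh(new THREE.BoxGeometry(1, 1, 1), new THREE.MeshNormalMaterial());
    scene.add(mesh);

    // useFrame wraps requestAnimationFrame; `delta` is in milliseconds and the
    // returned handle can cancel the loop when the example is torn down.
    useFrame((delta) => {
      mesh.rotation.y += delta * 0.001;
      renderer.render(scene, camera);
    });

    // Register objects so the host page can dispose them on exit.
    needToDispose(renderer, scene);
  }
};

export { exampleSketch as default };
```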