webgl_raymarching_reflect
对应 three.js 示例地址。
仅需关注 init 函数的内容,其他部分都是示例小程序所使用的描述配置。
js
import * as THREE from "three";
import { OrbitControls } from "three/examples/jsm/controls/OrbitControls.js";
/** @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo} */
// Official three.js "webgl_raymarching_reflect" example wrapped for the
// mini-program runner: a full-screen quad whose fragment shader ray-marches a
// repeating sphere field above a checkered floor, with reflection bounces.
const exampleInfo = {
name: "webgl_raymarching_reflect",
useLoaders: [],
// Attribution rows shown in the example's info panel (see TagItem).
info: [
[
{
tag: "a",
link: "https://threejs.org",
content: "three.js"
},
{
tag: "text",
content: "- webgl raymarching example"
}
],
[
{
tag: "text",
content: "reflect by"
},
{
tag: "a",
link: "https://github.com/gam0022",
content: "gam0022"
},
{
tag: "text",
content: "("
},
{
tag: "a",
link: "http://qiita.com/gam0022/items/03699a07e4a4b5f2d41f",
content: "article"
},
{
tag: "text",
content: ")"
}
]
],
// Entry point; the destructured context supplies platform abstractions (see LoadContext).
init: ({ window, canvas, GUI, Stats, needToDispose, useFrame }) => {
let dolly, camera, scene, renderer;
let geometry, material, mesh;
let stats, clock;
init();
// One-time setup: renderer, camera rig, and the full-screen ray-marching quad.
function init() {
renderer = new THREE.WebGLRenderer({ canvas });
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.setAnimationLoop(animate);
window.addEventListener("resize", onWindowResize);
scene = new THREE.Scene();
// The camera rides on a group ("dolly") so animate() can move the whole rig.
dolly = new THREE.Group();
scene.add(dolly);
clock = new THREE.Clock();
camera = new THREE.PerspectiveCamera(60, canvas.width / canvas.height, 1, 2e3);
camera.position.z = 4;
dolly.add(camera);
// A 2x2 plane emitted directly in clip space by the vertex shader; every
// visible pixel is computed by the fragment shader below.
geometry = new THREE.PlaneGeometry(2, 2);
material = new THREE.RawShaderMaterial({
uniforms: {
resolution: { value: new THREE.Vector2(canvas.width, canvas.height) },
// matrixWorld is shared by reference, so camera/dolly motion is picked up
// automatically each frame without re-assigning the uniform.
cameraWorldMatrix: { value: camera.matrixWorld },
// The inverse projection matrix is cloned, so onWindowResize() must copy
// the recomputed matrix back into this uniform.
cameraProjectionMatrixInverse: { value: camera.projectionMatrixInverse.clone() }
},
vertexShader: `
attribute vec3 position;
void main(void) {
gl_Position = vec4(position, 1.0);
}
`,
fragmentShader: `
precision highp float;
uniform vec2 resolution;
uniform mat4 viewMatrix;
uniform vec3 cameraPosition;
uniform mat4 cameraWorldMatrix;
uniform mat4 cameraProjectionMatrixInverse;
const float EPS = 0.01;
const float OFFSET = EPS * 100.0;
const vec3 lightDir = vec3( -0.48666426339228763, 0.8111071056538127, -0.3244428422615251 );
// distance functions
vec3 opRep( vec3 p, float interval ) {
vec2 q = mod( p.xz, interval ) - interval * 0.5;
return vec3( q.x, p.y, q.y );
}
float sphereDist( vec3 p, float r ) {
return length( opRep( p, 3.0 ) ) - r;
}
float floorDist( vec3 p ){
return dot(p, vec3( 0.0, 1.0, 0.0 ) ) + 1.0;
}
vec4 minVec4( vec4 a, vec4 b ) {
return ( a.a < b.a ) ? a : b;
}
float checkeredPattern( vec3 p ) {
float u = 1.0 - floor( mod( p.x, 2.0 ) );
float v = 1.0 - floor( mod( p.z, 2.0 ) );
if ( ( u == 1.0 && v < 1.0 ) || ( u < 1.0 && v == 1.0 ) ) {
return 0.2;
} else {
return 1.0;
}
}
vec3 hsv2rgb( vec3 c ) {
vec4 K = vec4( 1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0 );
vec3 p = abs( fract( c.xxx + K.xyz ) * 6.0 - K.www );
return c.z * mix( K.xxx, clamp( p - K.xxx, 0.0, 1.0 ), c.y );
}
float sceneDist( vec3 p ) {
return min(
sphereDist( p, 1.0 ),
floorDist( p )
);
}
vec4 sceneColor( vec3 p ) {
return minVec4(
// 3 * 6 / 2 = 9
vec4( hsv2rgb(vec3( ( p.z + p.x ) / 9.0, 1.0, 1.0 ) ), sphereDist( p, 1.0 ) ),
vec4( vec3( 0.5 ) * checkeredPattern( p ), floorDist( p ) )
);
}
vec3 getNormal( vec3 p ) {
return normalize(vec3(
sceneDist(p + vec3( EPS, 0.0, 0.0 ) ) - sceneDist(p + vec3( -EPS, 0.0, 0.0 ) ),
sceneDist(p + vec3( 0.0, EPS, 0.0 ) ) - sceneDist(p + vec3( 0.0, -EPS, 0.0 ) ),
sceneDist(p + vec3( 0.0, 0.0, EPS ) ) - sceneDist(p + vec3( 0.0, 0.0, -EPS ) )
));
}
float getShadow( vec3 ro, vec3 rd ) {
float h = 0.0;
float c = 0.0;
float r = 1.0;
float shadowCoef = 0.5;
for ( float t = 0.0; t < 50.0; t++ ) {
h = sceneDist( ro + rd * c );
if ( h < EPS ) return shadowCoef;
r = min( r, h * 16.0 / c );
c += h;
}
return 1.0 - shadowCoef + r * shadowCoef;
}
vec3 getRayColor( vec3 origin, vec3 ray, out vec3 pos, out vec3 normal, out bool hit ) {
// marching loop
float dist;
float depth = 0.0;
pos = origin;
for ( int i = 0; i < 64; i++ ){
dist = sceneDist( pos );
depth += dist;
pos = origin + depth * ray;
if ( abs(dist) < EPS ) break;
}
// hit check and calc color
vec3 color;
if ( abs(dist) < EPS ) {
normal = getNormal( pos );
float diffuse = clamp( dot( lightDir, normal ), 0.1, 1.0 );
float specular = pow( clamp( dot( reflect( lightDir, normal ), ray ), 0.0, 1.0 ), 10.0 );
float shadow = getShadow( pos + normal * OFFSET, lightDir );
color = ( sceneColor( pos ).rgb * diffuse + vec3( 0.8 ) * specular ) * max( 0.5, shadow );
hit = true;
} else {
color = vec3( 0.0 );
}
return color - pow( clamp( 0.05 * depth, 0.0, 0.6 ), 2.0 );
}
void main(void) {
// screen position
vec2 screenPos = ( gl_FragCoord.xy * 2.0 - resolution ) / resolution;
// ray direction in normalized device coordinate
vec4 ndcRay = vec4( screenPos.xy, 1.0, 1.0 );
// convert ray direction from normalized device coordinate to world coordinate
vec3 ray = ( cameraWorldMatrix * cameraProjectionMatrixInverse * ndcRay ).xyz;
ray = normalize( ray );
// camera position
vec3 cPos = cameraPosition;
// cast ray
vec3 color = vec3( 0.0 );
vec3 pos, normal;
bool hit;
float alpha = 1.0;
for ( int i = 0; i < 3; i++ ) {
color += alpha * getRayColor( cPos, ray, pos, normal, hit );
alpha *= 0.3;
ray = normalize( reflect( ray, normal ) );
cPos = pos + normal * OFFSET;
if ( !hit ) break;
}
gl_FragColor = vec4( color, 1.0 );
}
`
});
mesh = new THREE.Mesh(geometry, material);
// The quad is positioned in clip space by its vertex shader, so default
// frustum culling would wrongly discard it; disable culling.
mesh.frustumCulled = false;
scene.add(mesh);
const controls = new OrbitControls(camera, renderer.domElement);
controls.enableZoom = false;
stats = new Stats(renderer);
needToDispose(renderer, scene, controls);
}
// Keeps the drawing buffer, camera aspect, and shader uniforms in sync with
// the window size.
function onWindowResize() {
renderer.setSize(window.innerWidth, window.innerHeight);
camera.aspect = canvas.width / canvas.height;
camera.updateProjectionMatrix();
material.uniforms.resolution.value.set(canvas.width, canvas.height);
material.uniforms.cameraProjectionMatrixInverse.value.copy(camera.projectionMatrixInverse);
}
// Per-frame callback: slide the camera rig forward along -Z and render.
function animate() {
stats.begin();
const elapsedTime = clock.getElapsedTime();
dolly.position.z = -elapsedTime;
renderer.render(scene, camera);
stats.end();
stats.update();
}
}
};
// Default export of the example descriptor consumed by the mini-program runner.
export default exampleInfo;
ts
import { Loader, TypedArray } from 'three';
/**
 * Multi-platform wrapper descriptor for an official three.js example.
 * */
export interface OfficialExampleInfo extends MiniProgramMeta {
/** Example name (kept identical to the official site). */
name: string;
/** Main entry point of the example. */
init: (context: LoadContext) => void;
}
export interface LoadContext {
// Kept to minimize changes to the official code; effectively the same object as `canvas`.
window: EventTarget & { innerWidth: number; innerHeight: number; devicePixelRatio: number };
/** HTMLCanvasElement */
canvas: any;
/** https://www.npmjs.com/package/lil-gui */
GUI: any;
/**
 * https://www.npmjs.com/package/stats.js
 * Other supported versions may also be used.
 * */
Stats: any;
/** Collects objects that need dispose() (the official examples do not handle this). */
needToDispose: (...objs: any[]) => void | ((fromFn: () => any[]) => void);
/** Generic wrapper based on requestAnimationFrame. */
useFrame(animateFunc: (/** ms */ delta: number) => void): { cancel: () => void };
/** Shows the loading modal. */
requestLoading(text?: string): Promise<void>;
/** Hides the loading modal. */
cancelLoading(): void;
/** Generic helper for saving a file. */
saveFile(
fileName: string,
data: ArrayBuffer | TypedArray | DataView | string
): Promise<string | null>;
/** Resource paths used when an example needs the Draco decoder. */
DecoderPath: {
GLTF: string;
STANDARD: string;
};
/** Prepends the CDN prefix to a resource path. */
withCDNPrefix(path: string): string;
/**
 * In the mini-program, use import { VideoTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/VideoTexture.js';
 * on the web, THREE.VideoTexture can be used directly.
 * */
getVideoTexture(videoOptions: VideoOptions): Promise<[{ isVideoTexture: true }, video: any]>;
/**
 * In the mini-program, use import { CameraTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/CameraTexture.js';
 * on the web, see the example webgl_materials_video_webcam.
 * */
getCameraTexture(): { isVideoTexture: true };
/** Dynamically updates a placeholder declared in `info`. */
bindInfoText(template: `$${string}$`, initValue?: string): { value: string };
/** Event callback for the split-screen slider control. */
onSlideStart(handle: () => void): void;
/** Event callback for the split-screen slider control. */
onSlideEnd(handle: () => void): void;
/** Event callback for the split-screen slider control. */
onSlideChange(handle: (offset: number, boxSize: number) => void): void;
}
export type VideoOptions = {
src: string;
/** Equivalent to HTMLVideoElement.naturalWidth (not obtainable in the mini-program). */
width: number;
/** Equivalent to HTMLVideoElement.naturalHeight (not obtainable in the mini-program). */
height: number;
loop?: boolean;
autoplay?: boolean;
muted?: boolean;
};
/** Configuration consumed by the example mini-program. */
export interface MiniProgramMeta {
/** Used to track loading-related statistics. */
useLoaders: Loader[];
/** Generic info rows (attribution links etc.). */
info: TagItem[][];
/** Special info panel with left/right label-value pairs. */
infoPanel?: {
left?: [string, string][];
right?: [string, string][];
};
/** Split-screen slider control configuration. */
needSlider?: {
/** Direction of the split. */
direction?: 'horizontal' | 'vertical';
/** Initial offset, 0-100. */
initPosition?: number;
};
/** Whether the virtual joystick control is needed. */
needArrowControls?: boolean;
/** Canvas type required by default. */
canvasType?: '2d' | 'webgl' | 'webgl2';
/** Canvas styles needed to keep the visual result consistent. */
canvasStyle?: {
bgColor?: string;
width?: number | string;
height?: number | string;
};
/** Some examples need to show a confirmation prompt before loading. */
initAfterConfirm?: {
/**
 * Prompt type
 * @default 'default'
 * */
type?: 'warning' | 'default';
text: string[];
};
}
/** Base shape shared by all info-panel tags. */
export interface BaseTag<T extends string> {
tag: T;
content: string;
}
/** Hyperlink tag rendered in the info panel. */
export interface ATag extends BaseTag<'a'> {
link: string;
}
/** Plain-text tag rendered in the info panel. */
export type TextTag = BaseTag<'text'>;
/** Any tag usable in `info` rows. */
export type TagItem = TextTag | ATag;