webgl_video_kinect
对应 three.js 官网示例地址。仅需关注 init 函数的内容,其他部分都是示例小程序所使用的描述配置。
js
import * as THREE from "three";

/**
 * Multi-platform port of the official three.js "webgl_video_kinect" example:
 * renders a kinect depth video as an additively-blended point cloud, with the
 * per-point depth decoded from the video texture in the vertex shader.
 *
 * Only `init` contains example logic; the rest is mini-program metadata.
 *
 * @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo}
 */
const exampleInfo = {
  name: "webgl_video_kinect",
  useLoaders: [],
  info: [
    [
      {
        tag: "a",
        // Fix: `ATag` entries require a `link` target; the official three.js
        // examples point this anchor at the project homepage.
        link: "https://threejs.org",
        content: "three.js"
      },
      {
        tag: "text",
        content: "- kinect"
      }
    ]
  ],
  init: ({
    window,
    canvas,
    GUI,
    Stats,
    needToDispose,
    useFrame,
    getVideoTexture,
    withCDNPrefix
  }) => {
    let scene, camera, renderer;
    let geometry, mesh, material;
    let mouse, center;
    let texture;

    init();

    // Builds camera, scene, video texture, point-cloud geometry/material, GUI
    // and renderer, then starts the animation loop.
    async function init() {
      camera = new THREE.PerspectiveCamera(50, window.innerWidth / window.innerHeight, 1, 1e4);
      camera.position.set(0, 0, 500);

      scene = new THREE.Scene();
      // The camera continuously looks at this point (see animate()).
      center = new THREE.Vector3();
      center.z = -1e3;

      // Platform-adapter helper: resolves to [texture, video] once the video
      // element/texture is ready.
      const [videoTexture, video] = await getVideoTexture({
        width: 474,
        height: 490,
        src: withCDNPrefix(`textures/kinect.mp4`),
        loop: true
      });
      texture = videoTexture;
      // Nearest filtering: depth texels must not be interpolated.
      texture.minFilter = THREE.NearestFilter;

      const width = 640, height = 480;
      const nearClipping = 850, farClipping = 4e3;

      // One vertex per depth-map texel: x/y store the texel coordinates; z is
      // left at 0 and computed in the vertex shader from the sampled depth.
      geometry = new THREE.BufferGeometry();
      const vertices = new Float32Array(width * height * 3);
      for (let i = 0, j = 0, l = vertices.length; i < l; i += 3, j++) {
        vertices[i] = j % width;
        vertices[i + 1] = Math.floor(j / width);
      }
      geometry.setAttribute("position", new THREE.BufferAttribute(vertices, 3));

      material = new THREE.ShaderMaterial({
        uniforms: {
          map: { value: texture },
          width: { value: width },
          height: { value: height },
          nearClipping: { value: nearClipping },
          farClipping: { value: farClipping },
          pointSize: { value: 2 },
          zOffset: { value: 1e3 }
        },
        vertexShader: `
uniform sampler2D map;
uniform float width;
uniform float height;
uniform float nearClipping, farClipping;
uniform float pointSize;
uniform float zOffset;
varying vec2 vUv;
const float XtoZ = 1.11146; // tan( 1.0144686 / 2.0 ) * 2.0;
const float YtoZ = 0.83359; // tan( 0.7898090 / 2.0 ) * 2.0;
void main() {
vUv = vec2( position.x / width, position.y / height );
vec4 color = texture2D( map, vUv );
float depth = ( color.r + color.g + color.b ) / 3.0;
// Projection code by @kcmic
float z = ( 1.0 - depth ) * (farClipping - nearClipping) + nearClipping;
vec4 pos = vec4(
( position.x / width - 0.5 ) * z * XtoZ,
( position.y / height - 0.5 ) * z * YtoZ,
- z + zOffset,
1.0);
gl_PointSize = pointSize;
gl_Position = projectionMatrix * modelViewMatrix * pos;
}
`,
        fragmentShader: `
uniform sampler2D map;
varying vec2 vUv;
void main() {
vec4 color = texture2D( map, vUv );
gl_FragColor = vec4( color.r, color.g, color.b, 0.2 );
}
`,
        blending: THREE.AdditiveBlending,
        depthTest: false,
        depthWrite: false,
        transparent: true
      });

      mesh = new THREE.Points(geometry, material);
      scene.add(mesh);

      // Expose the shader tuning uniforms in the GUI.
      const gui = new GUI();
      gui.add(material.uniforms.nearClipping, "value", 1, 1e4, 1).name("nearClipping");
      gui.add(material.uniforms.farClipping, "value", 1, 1e4, 1).name("farClipping");
      gui.add(material.uniforms.pointSize, "value", 1, 10, 1).name("pointSize");
      gui.add(material.uniforms.zOffset, "value", 0, 4e3, 1).name("zOffset");

      video.play();

      renderer = new THREE.WebGLRenderer({ canvas });
      renderer.setPixelRatio(window.devicePixelRatio);
      renderer.setSize(window.innerWidth, window.innerHeight);
      renderer.setAnimationLoop(animate);

      mouse = new THREE.Vector3(0, 0, 1);
      canvas.addEventListener("pointermove", onDocumentMouseMove);
      window.addEventListener("resize", onWindowResize);

      // Register disposables with the mini-program shell.
      needToDispose(renderer, scene);
    }

    function onWindowResize() {
      camera.aspect = window.innerWidth / window.innerHeight;
      camera.updateProjectionMatrix();
      renderer.setSize(window.innerWidth, window.innerHeight);
    }

    function onDocumentMouseMove(event) {
      // Pointer offset from the viewport centre, amplified; the camera eases
      // toward it each frame in animate().
      mouse.x = (event.clientX - window.innerWidth / 2) * 8;
      mouse.y = (event.clientY - window.innerHeight / 2) * 8;
    }

    function animate() {
      if (texture) {
        // Platform-adapter VideoTexture is refreshed explicitly each frame
        // (web THREE.VideoTexture updates automatically — see LoadContext docs).
        texture.update();
      }
      // Exponential easing of the camera toward the mouse target.
      camera.position.x += (mouse.x - camera.position.x) * 0.05;
      camera.position.y += (-mouse.y - camera.position.y) * 0.05;
      camera.lookAt(center);
      renderer.render(scene, camera);
    }
  }
};
export {
  exampleInfo as default
};
ts
import { Loader, TypedArray } from 'three';
/**
 * Multi-platform wrapper metadata for an official three.js example.
 * (Original comment "多端使用封装把版本" — "把" appears to be a typo for "的".)
 */
export interface OfficialExampleInfo extends MiniProgramMeta {
/** Example name (kept identical to the official three.js site). */
name: string;
/** Entry point ("main") of the example. */
init: (context: LoadContext) => void;
}
/** Context object handed to an example's `init` function. */
export interface LoadContext {
// Kept to minimise changes to the official example code; effectively the same object as `canvas`.
window: EventTarget & { innerWidth: number; innerHeight: number; devicePixelRatio: number };
/** HTMLCanvasElement */
canvas: any;
/** https://www.npmjs.com/package/lil-gui */
GUI: any;
/**
 * https://www.npmjs.com/package/stats.js
 * Other supported versions may also be used.
 * */
Stats: any;
/**
 * Collects objects that need dispose() (the official examples skip this).
 * NOTE(review): operator precedence makes the return type
 * `void | ((fromFn: () => any[]) => void)`; if a union of two function
 * signatures was intended, the first needs its own parentheses — confirm.
 */
needToDispose: (...objs: any[]) => void | ((fromFn: () => any[]) => void);
/** Generic rAF-based frame-loop helper. */
useFrame(animateFunc: (/** ms */ delta: number) => void): { cancel: () => void };
/** Shows the loading modal. */
requestLoading(text?: string): Promise<void>;
/** Hides the loading modal. */
cancelLoading(): void;
/** Generic helper for saving a file. */
saveFile(
fileName: string,
data: ArrayBuffer | TypedArray | DataView | string
): Promise<string | null>;
/** Resource paths used when an example needs a DracoDecoder. */
DecoderPath: {
GLTF: string;
STANDARD: string;
};
/** Prepends the CDN prefix to a resource path. */
withCDNPrefix(path: string): string;
/**
 * In mini programs use: import { VideoTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/VideoTexture.js';
 * On the web, THREE.VideoTexture can be used directly.
 * */
// Fix: tuple members must all have labels or none (TS error 5084 otherwise);
// label the first element to match the already-labelled `video` member.
getVideoTexture(videoOptions: VideoOptions): Promise<[texture: { isVideoTexture: true }, video: any]>;
/**
 * In mini programs use: import { CameraTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/CameraTexture.js';
 * On the web, see the webgl_materials_video_webcam example.
 * */
getCameraTexture(): { isVideoTexture: true };
/** Dynamically updates a placeholder used in `info`. */
bindInfoText(template: `$${string}$`, initValue?: string): { value: string };
/** Event callback for the split-screen slider control. */
onSlideStart(handle: () => void): void;
/** Event callback for the split-screen slider control. */
onSlideEnd(handle: () => void): void;
/** Event callback for the split-screen slider control. */
onSlideChange(handle: (offset: number, boxSize: number) => void): void;
}
/** Options accepted by `LoadContext.getVideoTexture`. */
export type VideoOptions = {
src: string;
/** Equivalent to HTMLVideoElement.naturalWidth (not obtainable in mini programs). */
width: number;
/** Equivalent to HTMLVideoElement.naturalHeight (not obtainable in mini programs). */
height: number;
loop?: boolean;
autoplay?: boolean;
muted?: boolean;
};
/** Configuration consumed by the example mini-program shell. */
export interface MiniProgramMeta {
/** Loaders used by the example; used to collect loading statistics. */
useLoaders: Loader[];
/** Generic info entries (rendered as tag lists). */
info: TagItem[][];
/** Special info-panel contents. */
infoPanel?: {
left?: [string, string][];
right?: [string, string][];
};
/** Split-screen slider control configuration. */
needSlider?: {
/** Slider orientation. */
direction?: 'horizontal' | 'vertical';
/** Initial offset, 0-100. */
initPosition?: number;
};
/** Whether the joystick control is needed. */
needArrowControls?: boolean;
/** Canvas type required by default. */
canvasType?: '2d' | 'webgl' | 'webgl2';
/** Canvas styles needed to keep the rendering consistent with the original. */
canvasStyle?: {
bgColor?: string;
width?: number | string;
height?: number | string;
};
/** Some examples need a confirmation prompt before loading. */
initAfterConfirm?: {
/**
 * Prompt type.
 * @default 'default'
 * */
type?: 'warning' | 'default';
text: string[];
};
}
/** Base shape of an info tag entry. */
export interface BaseTag<T extends string> {
tag: T;
content: string;
}
/** Anchor tag: rendered as a link pointing at `link`. */
export interface ATag extends BaseTag<'a'> {
link: string;
}
/** Plain-text tag. */
export type TextTag = BaseTag<'text'>;
/** Any renderable info tag. */
export type TagItem = TextTag | ATag;