Skip to content

webgl_materials_video

对应 three.js 示例地址

仅需关注 init 函数的内容,其他部分都是示例小程序所使用的描述配置。

js
import * as THREE from "three";
import { EffectComposer } from "three/examples/jsm/postprocessing/EffectComposer.js";
import { RenderPass } from "three/examples/jsm/postprocessing/RenderPass.js";
import { BloomPass } from "three/examples/jsm/postprocessing/BloomPass.js";
import { OutputPass } from "three/examples/jsm/postprocessing/OutputPass.js";

/** @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo} */
const exampleInfo = {
  name: "webgl_materials_video",
  // No loaders are used; the video is obtained through getVideoTexture instead.
  useLoaders: [],
  // Confirmation prompt shown before the example starts ("mind the volume"),
  // because the video plays unmuted (see muted: false below).
  initAfterConfirm: {
    text: ["注意音量"]
  },
  // Header rows (links and text) rendered above the canvas.
  info: [
    [
      {
        tag: "a",
        link: "https://threejs.org",
        content: "three.js"
      },
      {
        tag: "text",
        content: "- webgl video demo"
      }
    ],
    [
      {
        tag: "text",
        content: "playing"
      },
      {
        tag: "a",
        link: "http://durian.blender.org/",
        content: "sintel"
      },
      {
        tag: "text",
        content: "trailer"
      }
    ]
  ],
  // Entry point. Receives the platform abstraction layer: canvas, a window-like
  // shim, the cross-platform video-texture factory, the CDN path helper and the
  // dispose collector.
  init: ({
    window,
    canvas,
    GUI,
    Stats,
    needToDispose,
    useFrame,
    getVideoTexture,
    withCDNPrefix
  }) => {
    let camera, scene, renderer;
    let video, texture, material, mesh;
    let composer;
    // Pointer offset from the viewport centre; drives the camera easing in animate().
    let mouseX = 0;
    let mouseY = 0;
    let windowHalfX = window.innerWidth / 2;
    let windowHalfY = window.innerHeight / 2;
    let cube_count;
    // The video frame is split across an xgrid * ygrid lattice of boxes,
    // one material (with its own UV window) per cell.
    const meshes = [], materials = [], xgrid = 20, ygrid = 10;
    async function init() {
      camera = new THREE.PerspectiveCamera(40, window.innerWidth / window.innerHeight, 1, 1e4);
      camera.position.z = 500;
      scene = new THREE.Scene();
      // 16777215 === 0xffffff (white).
      const light = new THREE.DirectionalLight(16777215, 3);
      light.position.set(0.5, 1, 1).normalize();
      scene.add(light);
      renderer = new THREE.WebGLRenderer({ canvas });
      renderer.setPixelRatio(window.devicePixelRatio);
      renderer.setSize(window.innerWidth, window.innerHeight);
      // Cross-platform video texture; resolves to [texture, video object].
      // width/height must be given explicitly (mini-programs cannot query them).
      [texture, video] = await getVideoTexture({
        src: withCDNPrefix("textures/sintel.mp4"),
        width: 480,
        height: 204,
        muted: false,
        loop: true
      });
      // Skip the first 3 seconds. Mini-program video objects expose seek();
      // a plain HTMLVideoElement uses currentTime instead.
      if ("seek" in video) {
        video.seek(3);
      } else {
        video.currentTime = 3;
      }
      video.play();
      texture.colorSpace = THREE.SRGBColorSpace;
      let i, j, ox, oy, geometry;
      // Per-cell UV window size (fractions of the full frame) and box size in
      // world units; the source video is 480x204.
      const ux = 1 / xgrid;
      const uy = 1 / ygrid;
      const xsize = 480 / xgrid;
      const ysize = 204 / ygrid;
      const parameters = { color: 16777215, map: texture };
      cube_count = 0;
      for (i = 0; i < xgrid; i++) {
        for (j = 0; j < ygrid; j++) {
          ox = i;
          oy = j;
          geometry = new THREE.BoxGeometry(xsize, ysize, xsize);
          // Remap this box's UVs so it samples only its (i, j) tile of the video.
          change_uvs(geometry, ux, uy, ox, oy);
          materials[cube_count] = new THREE.MeshLambertMaterial(parameters);
          material = materials[cube_count];
          // Stash per-cell HSL parameters on the material object itself;
          // animate() re-reads them every frame to cycle the hue.
          material.hue = i / xgrid;
          material.saturation = 1 - j / ygrid;
          material.color.setHSL(material.hue, material.saturation, 0.5);
          mesh = new THREE.Mesh(geometry, material);
          // Centre the grid around the origin.
          mesh.position.x = (i - xgrid / 2) * xsize;
          mesh.position.y = (j - ygrid / 2) * ysize;
          mesh.position.z = 0;
          mesh.scale.x = mesh.scale.y = mesh.scale.z = 1;
          scene.add(mesh);
          // Random per-mesh drift velocities used by the scatter phase in animate().
          mesh.dx = 1e-3 * (0.5 - Math.random());
          mesh.dy = 1e-3 * (0.5 - Math.random());
          meshes[cube_count] = mesh;
          cube_count += 1;
        }
      }
      // Clearing is done manually in animate() before the composer runs.
      renderer.autoClear = false;
      canvas.addEventListener("pointermove", onDocumentMouseMove);
      // Post-processing chain: scene render -> bloom -> output pass.
      const renderPass = new RenderPass(scene, camera);
      const bloomPass = new BloomPass(1.3);
      const outputPass = new OutputPass();
      composer = new EffectComposer(renderer);
      composer.addPass(renderPass);
      composer.addPass(bloomPass);
      composer.addPass(outputPass);
      window.addEventListener("resize", onWindowResize);
      needToDispose(renderer, scene);
      renderer.setAnimationLoop(animate);
    }
    // Keep camera aspect, renderer size and composer size in sync with the viewport.
    function onWindowResize() {
      windowHalfX = window.innerWidth / 2;
      windowHalfY = window.innerHeight / 2;
      camera.aspect = window.innerWidth / window.innerHeight;
      camera.updateProjectionMatrix();
      renderer.setSize(window.innerWidth, window.innerHeight);
      composer.setSize(window.innerWidth, window.innerHeight);
    }
    // Shift and scale a geometry's UVs so it samples only the (offsetx, offsety)
    // cell of a unitx-by-unity grid laid over the texture.
    function change_uvs(geometry, unitx, unity, offsetx, offsety) {
      const uvs = geometry.attributes.uv.array;
      for (let i = 0; i < uvs.length; i += 2) {
        uvs[i] = (uvs[i] + offsetx) * unitx;
        uvs[i + 1] = (uvs[i + 1] + offsety) * unity;
      }
    }
    // Track pointer offset from the viewport centre (vertical motion damped by 0.3).
    function onDocumentMouseMove(event) {
      mouseX = event.clientX - windowHalfX;
      mouseY = (event.clientY - windowHalfY) * 0.3;
    }
    let h, counter = 1;
    function animate() {
      if (texture) {
        // Pull the latest video frame into the texture. NOTE(review): update()
        // appears to be the platform adapter's VideoTexture API — a plain
        // THREE.VideoTexture refreshes automatically; confirm against the adapter.
        texture.update();
      }
      const time = Date.now() * 5e-5;
      // Ease the camera toward the pointer position, always facing the origin.
      camera.position.x += (mouseX - camera.position.x) * 0.05;
      camera.position.y += (-mouseY - camera.position.y) * 0.05;
      camera.lookAt(scene.position);
      // Cycle each cell's hue over time, keeping its fixed saturation.
      for (let i = 0; i < cube_count; i++) {
        material = materials[i];
        h = 360 * (material.hue + time) % 360 / 360;
        material.color.setHSL(h, material.saturation, 0.5);
      }
      // Frames 201..999 of every 1000-frame cycle: let the cubes drift and tumble.
      if (counter % 1e3 > 200) {
        for (let i = 0; i < cube_count; i++) {
          mesh = meshes[i];
          mesh.rotation.x += 10 * mesh.dx;
          mesh.rotation.y += 10 * mesh.dy;
          mesh.position.x -= 150 * mesh.dx;
          mesh.position.y += 150 * mesh.dy;
          mesh.position.z += 300 * mesh.dx;
        }
      }
      // Every 1000th frame: reverse the drift so the grid reassembles.
      if (counter % 1e3 === 0) {
        for (let i = 0; i < cube_count; i++) {
          mesh = meshes[i];
          mesh.dx *= -1;
          mesh.dy *= -1;
        }
      }
      counter++;
      // autoClear is off, so clear explicitly before the composer renders.
      renderer.clear();
      composer.render();
    }
    init();
  }
};
export {
  exampleInfo as default
};
ts
import { Loader, TypedArray } from 'three';
/**
 * Multi-platform (web + mini-program) wrapper description of an official
 * three.js example.
 * */
export interface OfficialExampleInfo extends MiniProgramMeta {
  /** Example name (kept identical to the official three.js example). */
  name: string;
  /** Entry point of the example ("main"). */
  init: (context: LoadContext) => void;
}

/** Platform abstraction handed to every example's init(). */
export interface LoadContext {
  // Kept window-like to minimise changes to the official example code;
  // in practice this is the same object as `canvas`.
  window: EventTarget & { innerWidth: number; innerHeight: number; devicePixelRatio: number };
  /** HTMLCanvasElement (or the platform's canvas equivalent). */
  canvas: any;
  /** https://www.npmjs.com/package/lil-gui */
  GUI: any;
  /**
   * https://www.npmjs.com/package/stats.js
   * Other supported versions may also be used.
   * */
  Stats: any;
  /** Collect objects that need dispose() (the official examples skip this). */
  needToDispose: (...objs: any[]) => void | ((fromFn: () => any[]) => void);

  /** Generic requestAnimationFrame-based loop helper. */
  useFrame(animateFunc: (/** ms */ delta: number) => void): { cancel: () => void };

  /** Show the loading modal. */
  requestLoading(text?: string): Promise<void>;

  /** Hide the loading modal. */
  cancelLoading(): void;

  /** Cross-platform "save file" helper. */
  saveFile(
    fileName: string,
    data: ArrayBuffer | TypedArray | DataView | string
  ): Promise<string | null>;

  /** Decoder asset paths for examples that use DracoDecoder. */
  DecoderPath: {
    GLTF: string;
    STANDARD: string;
  };

  /** Prefix a resource path with the CDN base URL. */
  withCDNPrefix(path: string): string;

  /**
   * In the mini-program use
   * `import { VideoTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/VideoTexture.js';`
   * on the web THREE.VideoTexture can be used directly.
   * */
  // FIX: tuple members must either all be labeled or all be unlabeled (TS5084);
  // the original mixed an unlabeled first member with a labeled second one,
  // which does not compile. Labels are purely cosmetic, so this is
  // interface-compatible.
  getVideoTexture(videoOptions: VideoOptions): Promise<[texture: { isVideoTexture: true }, video: any]>;

  /**
   * In the mini-program use
   * `import { CameraTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/CameraTexture.js';`
   * on the web see the example webgl_materials_video_webcam.
   * */
  getCameraTexture(): { isVideoTexture: true };

  /** Dynamically update a `$placeholder$` used in `info`. */
  bindInfoText(template: `$${string}$`, initValue?: string): { value: string };

  /** Event callback for the split-screen slider control. */
  onSlideStart(handle: () => void): void;
  /** Event callback for the split-screen slider control. */
  onSlideEnd(handle: () => void): void;
  /** Event callback for the split-screen slider control. */
  onSlideChange(handle: (offset: number, boxSize: number) => void): void;
}

/** Options accepted by LoadContext.getVideoTexture(). */
export type VideoOptions = {
  src: string;
  /** Equivalent of HTMLVideoElement's naturalWidth (not obtainable in mini-programs). */
  width: number;
  /** Equivalent of HTMLVideoElement's naturalHeight (not obtainable in mini-programs). */
  height: number;
  loop?: boolean;
  autoplay?: boolean;
  muted?: boolean;
};

/** Configuration consumed by the example mini-program shell. */
export interface MiniProgramMeta {
  /** Loaders used by the example (for load-progress statistics). */
  useLoaders: Loader[];
  /** Generic header info: rows of text/link tags. */
  info: TagItem[][];
  /** Special info panel: label/value pairs shown on the left/right. */
  infoPanel?: {
    left?: [string, string][];
    right?: [string, string][];
  };
  /** Split-screen slider control configuration. */
  needSlider?: {
    /** Slider direction. */
    direction?: 'horizontal' | 'vertical';
    /** Initial offset, 0-100. */
    initPosition?: number;
  };
  /** Whether a virtual joystick control is needed. */
  needArrowControls?: boolean;
  /** Canvas type required by default. */
  canvasType?: '2d' | 'webgl' | 'webgl2';
  /** Canvas style required to keep the rendering consistent with the web version. */
  canvasStyle?: {
    bgColor?: string;
    width?: number | string;
    height?: number | string;
  };
  /** Some examples need a confirmation prompt before loading. */
  initAfterConfirm?: {
    /**
     * Prompt type.
     * @default 'default'
     * */
    type?: 'warning' | 'default';
    text: string[];
  };
}

/** Common shape of an entry in `MiniProgramMeta.info`. */
export interface BaseTag<T extends string> {
  tag: T;
  content: string;
}

/** Hyperlink entry. */
export interface ATag extends BaseTag<'a'> {
  link: string;
}

/** Plain-text entry. */
export type TextTag = BaseTag<'text'>;

/** Any renderable info entry. */
export type TagItem = TextTag | ATag;