
webgl_buffergeometry_drawrange

Link to the corresponding three.js example.

Only the contents of the `init` function need your attention; everything else is descriptive configuration used by the example mini program.
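
In outline, each example module exports a single object: a few metadata fields consumed by the mini program plus an `init` entry point that contains the actual three.js code. A condensed sketch of the module listed below:

```js
/** @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo} */
const exampleInfo = {
  name: "webgl_buffergeometry_drawrange", // kept identical to the official example name
  useLoaders: [],                         // loaders used by the example
  info: [ /* description entries rendered by the mini program */ ],
  init: ({ window, canvas, GUI, Stats, needToDispose, useFrame }) => {
    // the ported example code lives entirely in here
  }
};
export { exampleInfo as default };
```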

```js
import * as THREE from "three";
import { OrbitControls } from "three/examples/jsm/controls/OrbitControls.js";

/** @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo} */
const exampleInfo = {
  name: "webgl_buffergeometry_drawrange",
  useLoaders: [],
  info: [
    [
      { tag: "a", link: "https://threejs.org", content: "three.js" },
      { tag: "text", content: "webgl - buffergeometry drawrange" }
    ],
    [
      { tag: "text", content: "by" },
      { tag: "a", link: "https://twitter.com/fernandojsg", content: "fernandojsg" }
    ]
  ],
  init: ({ window, canvas, GUI, Stats, needToDispose, useFrame }) => {
    let group;
    let stats;
    const particlesData = [];
    let camera, scene, renderer;
    let positions, colors;
    let particles;
    let pointCloud;
    let particlePositions;
    let linesMesh;
    const maxParticleCount = 1000;
    let particleCount = 500;
    const r = 800;
    const rHalf = r / 2;
    const effectController = {
      showDots: true,
      showLines: true,
      minDistance: 150,
      limitConnections: false,
      maxConnections: 20,
      particleCount: 500
    };
    init();
    function initGUI() {
      const gui = new GUI();
      gui.add(effectController, "showDots").onChange(function(value) {
        pointCloud.visible = value;
      });
      gui.add(effectController, "showLines").onChange(function(value) {
        linesMesh.visible = value;
      });
      gui.add(effectController, "minDistance", 10, 300);
      gui.add(effectController, "limitConnections");
      gui.add(effectController, "maxConnections", 0, 30, 1);
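      // Changing the particle count only updates the draw range; no buffers are reallocated.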
      gui.add(effectController, "particleCount", 0, maxParticleCount, 1).onChange(function(value) {
        particleCount = value;
        particles.setDrawRange(0, particleCount);
      });
    }
    function init() {
      initGUI();
      camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 1, 4000);
      camera.position.z = 1750;
      scene = new THREE.Scene();
      group = new THREE.Group();
      scene.add(group);
      const helper = new THREE.BoxHelper(new THREE.Mesh(new THREE.BoxGeometry(r, r, r)));
      helper.material.color.setHex(0x474747);
      helper.material.blending = THREE.AdditiveBlending;
      helper.material.transparent = true;
      group.add(helper);
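      // Pre-allocate the line position/color buffers for the worst case: every particle
      // connected to every other one. Only part of each buffer is actually drawn,
      // controlled with setDrawRange() on every frame.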
      const segments = maxParticleCount * maxParticleCount;
      positions = new Float32Array(segments * 3);
      colors = new Float32Array(segments * 3);
      const pMaterial = new THREE.PointsMaterial({
        color: 0xffffff,
        size: 3,
        blending: THREE.AdditiveBlending,
        transparent: true,
        sizeAttenuation: false
      });
      particles = new THREE.BufferGeometry();
      particlePositions = new Float32Array(maxParticleCount * 3);
      for (let i = 0; i < maxParticleCount; i++) {
        const x = Math.random() * r - r / 2;
        const y = Math.random() * r - r / 2;
        const z = Math.random() * r - r / 2;
        particlePositions[i * 3] = x;
        particlePositions[i * 3 + 1] = y;
        particlePositions[i * 3 + 2] = z;
        particlesData.push({
          velocity: new THREE.Vector3(
            -1 + Math.random() * 2,
            -1 + Math.random() * 2,
            -1 + Math.random() * 2
          ),
          numConnections: 0
        });
      }
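      // Draw only the first particleCount points; the GUI slider adjusts this range at runtime.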
      particles.setDrawRange(0, particleCount);
      particles.setAttribute(
        "position",
        new THREE.BufferAttribute(particlePositions, 3).setUsage(THREE.DynamicDrawUsage)
      );
      pointCloud = new THREE.Points(particles, pMaterial);
      group.add(pointCloud);
      const geometry = new THREE.BufferGeometry();
      geometry.setAttribute(
        "position",
        new THREE.BufferAttribute(positions, 3).setUsage(THREE.DynamicDrawUsage)
      );
      geometry.setAttribute(
        "color",
        new THREE.BufferAttribute(colors, 3).setUsage(THREE.DynamicDrawUsage)
      );
      geometry.computeBoundingSphere();
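      // Start by drawing nothing; animate() grows the range as connections are found.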
      geometry.setDrawRange(0, 0);
      const material = new THREE.LineBasicMaterial({
        vertexColors: true,
        blending: THREE.AdditiveBlending,
        transparent: true
      });
      linesMesh = new THREE.LineSegments(geometry, material);
      group.add(linesMesh);
      renderer = new THREE.WebGLRenderer({ antialias: true, canvas });
      renderer.setPixelRatio(window.devicePixelRatio);
      renderer.setSize(window.innerWidth, window.innerHeight);
      renderer.setAnimationLoop(animate);
      const controls = new OrbitControls(camera, renderer.domElement);
      controls.minDistance = 1000;
      controls.maxDistance = 3000;
      stats = new Stats(renderer);
      window.addEventListener("resize", onWindowResize);
      needToDispose(renderer, scene, controls);
    }
    function onWindowResize() {
      camera.aspect = window.innerWidth / window.innerHeight;
      camera.updateProjectionMatrix();
      renderer.setSize(window.innerWidth, window.innerHeight);
    }
    function animate() {
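      // Move every particle, then rebuild the connection line segments for this frame.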
      let vertexpos = 0;
      let colorpos = 0;
      let numConnected = 0;
      for (let i = 0; i < particleCount; i++) particlesData[i].numConnections = 0;
      for (let i = 0; i < particleCount; i++) {
        const particleData = particlesData[i];
        particlePositions[i * 3] += particleData.velocity.x;
        particlePositions[i * 3 + 1] += particleData.velocity.y;
        particlePositions[i * 3 + 2] += particleData.velocity.z;
        if (particlePositions[i * 3 + 1] < -rHalf || particlePositions[i * 3 + 1] > rHalf)
          particleData.velocity.y = -particleData.velocity.y;
        if (particlePositions[i * 3] < -rHalf || particlePositions[i * 3] > rHalf)
          particleData.velocity.x = -particleData.velocity.x;
        if (particlePositions[i * 3 + 2] < -rHalf || particlePositions[i * 3 + 2] > rHalf)
          particleData.velocity.z = -particleData.velocity.z;
        if (effectController.limitConnections && particleData.numConnections >= effectController.maxConnections)
          continue;
        for (let j = i + 1; j < particleCount; j++) {
          const particleDataB = particlesData[j];
          if (effectController.limitConnections && particleDataB.numConnections >= effectController.maxConnections)
            continue;
          const dx = particlePositions[i * 3] - particlePositions[j * 3];
          const dy = particlePositions[i * 3 + 1] - particlePositions[j * 3 + 1];
          const dz = particlePositions[i * 3 + 2] - particlePositions[j * 3 + 2];
          const dist = Math.sqrt(dx * dx + dy * dy + dz * dz);
          if (dist < effectController.minDistance) {
            particleData.numConnections++;
            particleDataB.numConnections++;
            const alpha = 1 - dist / effectController.minDistance;
            positions[vertexpos++] = particlePositions[i * 3];
            positions[vertexpos++] = particlePositions[i * 3 + 1];
            positions[vertexpos++] = particlePositions[i * 3 + 2];
            positions[vertexpos++] = particlePositions[j * 3];
            positions[vertexpos++] = particlePositions[j * 3 + 1];
            positions[vertexpos++] = particlePositions[j * 3 + 2];
            colors[colorpos++] = alpha;
            colors[colorpos++] = alpha;
            colors[colorpos++] = alpha;
            colors[colorpos++] = alpha;
            colors[colorpos++] = alpha;
            colors[colorpos++] = alpha;
            numConnected++;
          }
        }
      }
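      // Each connection is one line segment with two vertices, so draw numConnected * 2 vertices.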
      linesMesh.geometry.setDrawRange(0, numConnected * 2);
      linesMesh.geometry.attributes.position.needsUpdate = true;
      linesMesh.geometry.attributes.color.needsUpdate = true;
      pointCloud.geometry.attributes.position.needsUpdate = true;
      render();
      stats.update();
    }
    function render() {
      const time = Date.now() * 1e-3;
      group.rotation.y = time * 0.1;
      renderer.render(scene, camera);
    }
  }
};
export {
  exampleInfo as default
};
```
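
The core of this example is the drawRange technique: the point and line buffers are allocated once at their maximum size, and `BufferGeometry.setDrawRange(start, count)` tells the renderer how many of those vertices to actually draw, so the rendered geometry can grow or shrink every frame without reallocating GPU buffers. A minimal, self-contained sketch of the same pattern (all names here are illustrative and not part of the example above):

```js
import * as THREE from "three";

// Allocate room for up to MAX_VERTICES line vertices once, up front.
const MAX_VERTICES = 1000;
const positions = new Float32Array(MAX_VERTICES * 3);

const geometry = new THREE.BufferGeometry();
geometry.setAttribute(
  "position",
  new THREE.BufferAttribute(positions, 3).setUsage(THREE.DynamicDrawUsage)
);
geometry.setDrawRange(0, 0); // draw nothing until data has been written

const lines = new THREE.LineSegments(geometry, new THREE.LineBasicMaterial({ color: 0xffffff })); // add to a scene as usual

// Per frame: write the active vertices, then expose only that slice of the buffer.
function updateLines(activeVertexCount) {
  // ...fill positions[0 .. activeVertexCount * 3 - 1] here...
  geometry.setDrawRange(0, activeVertexCount);
  geometry.attributes.position.needsUpdate = true;
}
```

The `OfficialExampleInfo` and `LoadContext` types referenced above are defined as follows.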

```ts
import { Loader, TypedArray } from 'three';
/**
 * Multi-platform wrapper of the official three.js examples.
 */
export interface OfficialExampleInfo extends MiniProgramMeta {
  /** Example name (kept identical to the official example) */
  name: string;
  /** main */
  init: (context: LoadContext) => void;
}

export interface LoadContext {
  // Kept to minimize changes to the official code; effectively equivalent to canvas
  window: EventTarget & { innerWidth: number; innerHeight: number; devicePixelRatio: number };
  /** HTMLCanvasElement */
  canvas: any;
  /** https://www.npmjs.com/package/lil-gui */
  GUI: any;
  /**
   * https://www.npmjs.com/package/stats.js
   * Other supported versions may also be used.
   */
  Stats: any;
  /** Collects objects that need to be disposed (the official examples do not handle this) */
  needToDispose: (...objs: any[]) => void | ((fromFn: () => any[]) => void);

  /** General wrapper based on requestAnimationFrame */
  useFrame(animateFunc: (/** ms */ delta: number) => void): { cancel: () => void };

  /** Show the loading modal */
  requestLoading(text?: string): Promise<void>;

  /** Hide the loading modal */
  cancelLoading(): void;

  /** General wrapper for saving a file */
  saveFile(
    fileName: string,
    data: ArrayBuffer | TypedArray | DataView | string
  ): Promise<string | null>;

  /** Resource paths for examples that use DracoDecoder */
  DecoderPath: {
    GLTF: string;
    STANDARD: string;
  };

  /** Prepends the CDN prefix to a resource path */
  withCDNPrefix(path: string): string;

  /**
   * In the mini program, use `import { VideoTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/VideoTexture.js';`
   * In a normal (web) environment, THREE.VideoTexture can be used directly.
   */
  getVideoTexture(videoOptions: VideoOptions): Promise<[{ isVideoTexture: true }, video: any]>;

  /**
   * In the mini program, use `import { CameraTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/CameraTexture.js';`
   * In a normal (web) environment, see the example webgl_materials_video_webcam.
   */
  getCameraTexture(): { isVideoTexture: true };

  /** Dynamically updates a placeholder in info */
  bindInfoText(template: `$${string}$`, initValue?: string): { value: string };

  /** Event callback for the split-screen (slider) control */
  onSlideStart(handle: () => void): void;
  /** Event callback for the split-screen (slider) control */
  onSlideEnd(handle: () => void): void;
  /** Event callback for the split-screen (slider) control */
  onSlideChange(handle: (offset: number, boxSize: number) => void): void;
}

export type VideoOptions = {
  src: string;
  /** Equivalent to the naturalWidth of an HTMLVideoElement (not obtainable in the mini program) */
  width: number;
  /** Equivalent to the naturalHeight of an HTMLVideoElement (not obtainable in the mini program) */
  height: number;
  loop?: boolean;
  autoplay?: boolean;
  muted?: boolean;
};

/** Configuration used by the example mini program */
export interface MiniProgramMeta {
  /** Used to collect loading statistics */
  useLoaders: Loader[];
  /** General info */
  info: TagItem[][];
  /** Special info */
  infoPanel?: {
    left?: [string, string][];
    right?: [string, string][];
  };
  /** Split-screen (slider) control configuration */
  needSlider?: {
    /** Direction */
    direction?: 'horizontal' | 'vertical';
    /** Initial offset, 0-100 */
    initPosition?: number;
  };
  /** Virtual joystick control */
  needArrowControls?: boolean;
  /** Canvas type required by default */
  canvasType?: '2d' | 'webgl' | 'webgl2';
  /** Canvas style needed to keep the visual result consistent */
  canvasStyle?: {
    bgColor?: string;
    width?: number | string;
    height?: number | string;
  };
  /** Some examples need to show a prompt before loading */
  initAfterConfirm?: {
    /**
     * Prompt type
     * @default 'default'
     */
    type?: 'warning' | 'default';
    text: string[];
  };
}

export interface BaseTag<T extends string> {
  tag: T;
  content: string;
}

export interface ATag extends BaseTag<'a'> {
  link: string;
}

export type TextTag = BaseTag<'text'>;

export type TagItem = TextTag | ATag;
```
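
As a usage illustration, here is a hedged sketch of an example module that leans on the context helpers above (`useFrame`, `bindInfoText`, `needToDispose`) instead of the renderer's own animation loop. The scene content and the `$time$` placeholder convention are assumptions made for this sketch, inferred from the type signatures rather than taken from the example above:

```js
import * as THREE from "three";

/** @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo} */
const sketchExample = {
  name: "usage_sketch",
  useLoaders: [],
  info: [[{ tag: "text", content: "frame delta: $time$ ms" }]],
  init: ({ window, canvas, needToDispose, useFrame, bindInfoText }) => {
    const renderer = new THREE.WebGLRenderer({ antialias: true, canvas });
    renderer.setPixelRatio(window.devicePixelRatio);
    renderer.setSize(window.innerWidth, window.innerHeight);

    const scene = new THREE.Scene();
    const camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 1, 100);
    camera.position.z = 5;

    const mesh = new THREE.Mesh(new THREE.BoxGeometry(1, 1, 1), new THREE.MeshNormalMaterial());
    scene.add(mesh);

    // Binds the assumed `$time$` placeholder declared in `info`; assigning `.value` updates the text.
    const timeText = bindInfoText("$time$", "0");

    // useFrame is the animation-frame wrapper from LoadContext; `delta` is in milliseconds.
    useFrame((delta) => {
      mesh.rotation.y += 0.01;
      timeText.value = delta.toFixed(1);
      renderer.render(scene, camera);
    });

    // Hand the renderer and scene to the host so they can be disposed when the example closes.
    needToDispose(renderer, scene);
  }
};

export { sketchExample as default };
```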