
webgl_custom_attributes_points3

Link to the corresponding three.js example.

Only the contents of the `init` function need attention; everything else is descriptive configuration used by the example mini-program.

```js
import * as THREE from "three";
import * as BufferGeometryUtils from "three/examples/jsm/utils/BufferGeometryUtils.js";

/** @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo} */
const exampleInfo = {
  name: "webgl_custom_attributes_points3",
  useLoaders: [],
  info: [
    [
      {
        tag: "a",
        link: "https://threejs.org",
        content: "three.js"
      },
      {
        tag: "text",
        content: "- custom attributes example - billboards - alphatest"
      }
    ]
  ],
  init: ({ window, canvas, GUI, Stats, needToDispose, useFrame }) => {
    let renderer, scene, camera, stats;
    let object;
    let vertices1;
    const WIDTH = window.innerWidth;
    const HEIGHT = window.innerHeight;
    init();
    function init() {
      camera = new THREE.PerspectiveCamera(40, WIDTH / HEIGHT, 1, 1000);
      camera.position.z = 500;
      scene = new THREE.Scene();
      let radius = 100;
      const inner = 0.6 * radius;
      const vertex = new THREE.Vector3();
      const vertices = [];
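      // Generate up to 100000 random points inside a cube of half-size `radius`,
      // keeping only those outside the inner 0.6 * radius core so the cloud forms a hollow shell.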
      for (let i = 0; i < 100000; i++) {
        vertex.x = Math.random() * 2 - 1;
        vertex.y = Math.random() * 2 - 1;
        vertex.z = Math.random() * 2 - 1;
        vertex.multiplyScalar(radius);
        if (vertex.x > inner || vertex.x < -inner || vertex.y > inner || vertex.y < -inner || vertex.z > inner || vertex.z < -inner)
          vertices.push(vertex.x, vertex.y, vertex.z);
      }
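      // Number of points in the random cloud; everything appended after this belongs to the box frame.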
      vertices1 = vertices.length / 3;
      radius = 200;
      let boxGeometry1 = new THREE.BoxGeometry(radius, 0.1 * radius, 0.1 * radius, 50, 5, 5);
      boxGeometry1.deleteAttribute("normal");
      boxGeometry1.deleteAttribute("uv");
      boxGeometry1 = BufferGeometryUtils.mergeVertices(boxGeometry1);
      const matrix = new THREE.Matrix4();
      const position = new THREE.Vector3();
      const rotation = new THREE.Euler();
      const quaternion = new THREE.Quaternion();
      const scale = new THREE.Vector3(1, 1, 1);
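      // Bakes a box geometry's vertices, transformed by (position, Y-rotation), into the shared vertex array.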
      function addGeo(geo, x, y, z, ry) {
        position.set(x, y, z);
        rotation.set(0, ry, 0);
        matrix.compose(position, quaternion.setFromEuler(rotation), scale);
        const positionAttribute2 = geo.getAttribute("position");
        for (let i = 0, l = positionAttribute2.count; i < l; i++) {
          vertex.fromBufferAttribute(positionAttribute2, i);
          vertex.applyMatrix4(matrix);
          vertices.push(vertex.x, vertex.y, vertex.z);
        }
      }
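      // Eight horizontal edges of the cube frame: four bars along X, four rotated 90 degrees about Y to run along Z.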
      addGeo(boxGeometry1, 0, 110, 110, 0);
      addGeo(boxGeometry1, 0, 110, -110, 0);
      addGeo(boxGeometry1, 0, -110, 110, 0);
      addGeo(boxGeometry1, 0, -110, -110, 0);
      addGeo(boxGeometry1, 110, 110, 0, Math.PI / 2);
      addGeo(boxGeometry1, 110, -110, 0, Math.PI / 2);
      addGeo(boxGeometry1, -110, 110, 0, Math.PI / 2);
      addGeo(boxGeometry1, -110, -110, 0, Math.PI / 2);
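      // Four vertical edges at the corners of the frame.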
      let boxGeometry2 = new THREE.BoxGeometry(0.1 * radius, radius * 1.2, 0.1 * radius, 5, 60, 5);
      boxGeometry2.deleteAttribute("normal");
      boxGeometry2.deleteAttribute("uv");
      boxGeometry2 = BufferGeometryUtils.mergeVertices(boxGeometry2);
      addGeo(boxGeometry2, 110, 0, 110, 0);
      addGeo(boxGeometry2, 110, 0, -110, 0);
      addGeo(boxGeometry2, -110, 0, 110, 0);
      addGeo(boxGeometry2, -110, 0, -110, 0);
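      // Per-point attributes: cloud points get a cyan-to-blue hue ramp and size 10,
      // frame points a constant orange and size 40. "ca" feeds the custom vertex shader below.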
      const positionAttribute = new THREE.Float32BufferAttribute(vertices, 3);
      const colors = [];
      const sizes = [];
      const color = new THREE.Color();
      for (let i = 0; i < positionAttribute.count; i++) {
        if (i < vertices1) {
          color.setHSL(0.5 + 0.2 * (i / vertices1), 1, 0.5);
        } else {
          color.setHSL(0.1, 1, 0.5);
        }
        color.toArray(colors, i * 3);
        sizes[i] = i < vertices1 ? 10 : 40;
      }
      const geometry = new THREE.BufferGeometry();
      geometry.setAttribute("position", positionAttribute);
      geometry.setAttribute("ca", new THREE.Float32BufferAttribute(colors, 3));
      geometry.setAttribute("size", new THREE.Float32BufferAttribute(sizes, 1));
      const texture = new THREE.TextureLoader().load("textures/sprites/ball.png");
      texture.wrapS = THREE.RepeatWrapping;
      texture.wrapT = THREE.RepeatWrapping;
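      // Custom point shader: per-point size and color attributes, an alpha-tested sprite texture,
      // and a simple black fog applied in the fragment stage.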
      const material = new THREE.ShaderMaterial({
        uniforms: {
          amplitude: { value: 1 },
          color: { value: new THREE.Color(0xffffff) },
          pointTexture: { value: texture }
        },
        vertexShader: `

			attribute float size;
			attribute vec4 ca;

			varying vec4 vColor;

			void main() {

				vColor = ca;

				vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );

				gl_PointSize = size * ( 150.0 / -mvPosition.z );

				gl_Position = projectionMatrix * mvPosition;

			}

		`,
        fragmentShader: `

			uniform vec3 color;
			uniform sampler2D pointTexture;

			varying vec4 vColor;

			void main() {

				vec4 outColor = texture2D( pointTexture, gl_PointCoord );

				if ( outColor.a < 0.5 ) discard;

				gl_FragColor = outColor * vec4( color * vColor.xyz, 1.0 );

				float depth = gl_FragCoord.z / gl_FragCoord.w;
				const vec3 fogColor = vec3( 0.0 );

				float fogFactor = smoothstep( 200.0, 600.0, depth );
				gl_FragColor = mix( gl_FragColor, vec4( fogColor, gl_FragColor.w ), fogFactor );

			}

		`
      });
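      // A single Points object renders both the random cloud and the frame bars.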
      object = new THREE.Points(geometry, material);
      scene.add(object);
      renderer = new THREE.WebGLRenderer({ canvas });
      renderer.setPixelRatio(window.devicePixelRatio);
      renderer.setSize(WIDTH, HEIGHT);
      renderer.setAnimationLoop(animate);
      stats = new Stats(renderer);
      window.addEventListener("resize", onWindowResize);
      needToDispose(renderer, scene);
    }
    function onWindowResize() {
      camera.aspect = window.innerWidth / window.innerHeight;
      camera.updateProjectionMatrix();
      renderer.setSize(window.innerWidth, window.innerHeight);
    }
    function animate() {
      render();
      stats.update();
    }
    function render() {
      const time = Date.now() * 0.01;
      object.rotation.y = object.rotation.z = 0.02 * time;
      const geometry = object.geometry;
      const attributes = geometry.attributes;
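      // Only the random-cloud points (i < vertices1) pulse in size; the frame points keep their fixed size.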
      for (let i = 0; i < attributes.size.array.length; i++) {
        if (i < vertices1) {
          attributes.size.array[i] = Math.max(0, 26 + 32 * Math.sin(0.1 * i + 0.6 * time));
        }
      }
      attributes.size.needsUpdate = true;
      renderer.render(scene, camera);
    }
  }
};
export {
  exampleInfo as default
};
```

```ts
import { Loader, TypedArray } from 'three';
/**
 * Multi-platform wrapper type for the official three.js examples.
 * */
export interface OfficialExampleInfo extends MiniProgramMeta {
  /** Example name (kept identical to the official one) */
  name: string;
  /** main entry */
  init: (context: LoadContext) => void;
}

export interface LoadContext {
  // To minimize changes to the official code; effectively the same object as `canvas`
  window: EventTarget & { innerWidth: number; innerHeight: number; devicePixelRatio: number };
  /** HTMLCanvasElement */
  canvas: any;
  /** https://www.npmjs.com/package/lil-gui */
  GUI: any;
  /**
   * https://www.npmjs.com/package/stats.js
   * Other supported versions can also be used.
   * */
  Stats: any;
  /** Collects objects that need to be disposed (the official examples do not handle this) */
  needToDispose: (...objs: any[]) => void | ((fromFn: () => any[]) => void);

  /** Generic wrapper based on requestAnimationFrame (rAF) */
  useFrame(animateFunc: (/** ms */ delta: number) => void): { cancel: () => void };

  /** Show the loading modal */
  requestLoading(text?: string): Promise<void>;

  /** Hide the loading modal */
  cancelLoading(): void;

  /** Generic helper for saving a file */
  saveFile(
    fileName: string,
    data: ArrayBuffer | TypedArray | DataView | string
  ): Promise<string | null>;

  /** Resource paths used when an example needs the Draco decoder */
  DecoderPath: {
    GLTF: string;
    STANDARD: string;
  };

  /** Prepends the CDN prefix to a resource path */
  withCDNPrefix(path: string): string;

  /**
   * In mini-programs, use import { VideoTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/VideoTexture.js';
   * On the web, THREE.VideoTexture can be used directly.
   * */
  getVideoTexture(videoOptions: VideoOptions): Promise<[{ isVideoTexture: true }, video: any]>;

  /**
   * In mini-programs, use import { CameraTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/CameraTexture.js';
   * On the web, see the webgl_materials_video_webcam example.
   * */
  getCameraTexture(): { isVideoTexture: true };

  /** Used to dynamically update placeholders in `info` */
  bindInfoText(template: `$${string}$`, initValue?: string): { value: string };

  /** Event callback for the split-screen (slider) control */
  onSlideStart(handle: () => void): void;
  /** Event callback for the split-screen (slider) control */
  onSlideEnd(handle: () => void): void;
  /** Event callback for the split-screen (slider) control */
  onSlideChange(handle: (offset: number, boxSize: number) => void): void;
}

export type VideoOptions = {
  src: string;
  /** Equivalent to HTMLVideoElement's naturalWidth (cannot be obtained in mini-programs) */
  width: number;
  /** Equivalent to HTMLVideoElement's naturalHeight (cannot be obtained in mini-programs) */
  height: number;
  loop?: boolean;
  autoplay?: boolean;
  muted?: boolean;
};

/** Configuration used by the example mini-program */
export interface MiniProgramMeta {
  /** Used to collect loading-related statistics */
  useLoaders: Loader[];
  /** General info */
  info: TagItem[][];
  /** Special info panel */
  infoPanel?: {
    left?: [string, string][];
    right?: [string, string][];
  };
  /** Split-screen (slider) control configuration */
  needSlider?: {
    /** Direction */
    direction?: 'horizontal' | 'vertical';
    /** Initial offset, 0-100 */
    initPosition?: number;
  };
  /** Virtual joystick control */
  needArrowControls?: boolean;
  /** Canvas type required by default */
  canvasType?: '2d' | 'webgl' | 'webgl2';
  /** Canvas style required to keep the visual result consistent */
  canvasStyle?: {
    bgColor?: string;
    width?: number | string;
    height?: number | string;
  };
  /** Some examples need a confirmation prompt before loading */
  initAfterConfirm?: {
    /**
     * Prompt type
     * @default 'default'
     * */
    type?: 'warning' | 'default';
    text: string[];
  };
}

export interface BaseTag<T extends string> {
  tag: T;
  content: string;
}

export interface ATag extends BaseTag<'a'> {
  link: string;
}

export type TextTag = BaseTag<'text'>;

export type TagItem = TextTag | ATag;
```
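
For reference, the sketch below shows one way a plain web page (rather than the mini-program host) could drive this example's `init`. It is an illustrative assumption, not part of the package: only the `LoadContext` members this particular example actually reads are provided, and the remaining members are stubbed out behind an `as any` cast.

```ts
import Stats from 'stats.js';
import { GUI } from 'lil-gui';
import exampleInfo from './webgl_custom_attributes_points3';

const canvas = document.querySelector('canvas') as HTMLCanvasElement;
const disposables: any[] = [];

exampleInfo.init({
  // The browser window already exposes innerWidth / innerHeight / devicePixelRatio and 'resize' events.
  window,
  canvas,
  GUI,
  // The plain stats.js panel simply ignores the renderer argument the adapter's Stats accepts.
  Stats,
  // Collect objects so the host can dispose() them when tearing the example down.
  needToDispose: (...objs: any[]) => disposables.push(...objs),
  // rAF-based loop matching the useFrame contract (delta in milliseconds).
  useFrame: (animate: (delta: number) => void) => {
    let last = performance.now();
    let id = requestAnimationFrame(function loop(now: number) {
      animate(now - last);
      last = now;
      id = requestAnimationFrame(loop);
    });
    return { cancel: () => cancelAnimationFrame(id) };
  }
} as any); // Members not used by this example (requestLoading, saveFile, ...) are omitted.
```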