webaudio_sandbox
This corresponds to the three.js example of the same name on the official site.
Only the body of the `init` function needs attention; everything else is descriptive configuration consumed by the example mini program.
```js
import * as THREE from "three";
import { FirstPersonControls } from "three/examples/jsm/controls/FirstPersonControls.js";
/** @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo} */
const exampleInfo = {
name: "webaudio_sandbox",
needArrowControls: true,
initAfterConfirm: {
text: ["注意音量"]
},
useLoaders: [],
info: [
[
{
tag: "a",
link: "https://threejs.org",
content: "three.js"
},
{
tag: "text",
content: "webaudio - sandbox"
}
],
[
{
tag: "text",
content: "music by"
},
{
tag: "a",
link: "http://www.newgrounds.com/audio/listen/358232",
content: "larrylarrybb"
},
{
tag: "text",
content: ","
},
{
tag: "a",
link: "http://www.newgrounds.com/audio/listen/376737",
content: "skullbeatz"
},
{
tag: "text",
content: "and"
},
{
tag: "a",
link: "http://opengameart.org/content/project-utopia-seamless-loop",
content: "congusbongus"
}
],
[
{
tag: "text",
content: "navigate with WASD / arrows / mouse"
}
]
],
init: ({ window, canvas, GUI, Stats, needToDispose, useFrame }) => {
let camera, controls, scene, renderer, light;
let material1, material2, material3;
let analyser1, analyser2, analyser3;
const clock = new THREE.Clock();
function init() {
camera = new THREE.PerspectiveCamera(50, window.innerWidth / window.innerHeight, 1, 10000);
camera.position.set(0, 25, 0);
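// the AudioListener represents the listener's ears; attaching it to the camera makes spatial audio follow the viewer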
const listener = new THREE.AudioListener();
camera.add(listener);
scene = new THREE.Scene();
scene.fog = new THREE.FogExp2(0x000000, 0.0025);
light = new THREE.DirectionalLight(0xffffff, 3);
light.position.set(0, 0.5, 1).normalize();
scene.add(light);
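// three sound-emitting spheres share one geometry but get separate materials so each can pulse independently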
const sphere = new THREE.SphereGeometry(20, 32, 16);
material1 = new THREE.MeshPhongMaterial({ color: 0xffaa00, flatShading: true, shininess: 0 });
material2 = new THREE.MeshPhongMaterial({ color: 0xff2200, flatShading: true, shininess: 0 });
material3 = new THREE.MeshPhongMaterial({ color: 0x6622aa, flatShading: true, shininess: 0 });
const mesh1 = new THREE.Mesh(sphere, material1);
mesh1.position.set(-250, 30, 0);
scene.add(mesh1);
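// PositionalAudio is spatialized: perceived volume falls off with distance; setRefDistance sets where attenuation begins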
const sound1 = new THREE.PositionalAudio(listener);
sound1.setRefDistance(20);
const loader = new THREE.AudioLoader();
loader.load("sounds/358232_j_s_song.mp3", function(buffer) {
sound1.setBuffer(buffer);
sound1.duration = buffer.duration;
sound1.play();
});
mesh1.add(sound1);
const mesh2 = new THREE.Mesh(sphere, material2);
mesh2.position.set(250, 30, 0);
scene.add(mesh2);
const sound2 = new THREE.PositionalAudio(listener);
sound2.setRefDistance(20);
loader.load("sounds/376737_Skullbeatz___Bad_Cat_Maste.mp3", function(buffer) {
sound2.setBuffer(buffer);
sound2.duration = buffer.duration;
sound2.play();
});
mesh2.add(sound2);
const mesh3 = new THREE.Mesh(sphere, material3);
mesh3.position.set(0, 30, -250);
scene.add(mesh3);
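// the third sphere is driven by a live Web Audio oscillator (via setNodeSource) instead of a decoded buffer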
const sound3 = new THREE.PositionalAudio(listener);
const oscillator = listener.context.createOscillator();
oscillator.type = "sine";
oscillator.frequency.setValueAtTime(144, sound3.context.currentTime);
oscillator.start(0);
sound3.setNodeSource(oscillator);
sound3.setRefDistance(20);
sound3.setVolume(0.5);
mesh3.add(sound3);
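// analysers with an FFT size of 32 sample each source; animate() maps the average frequency amplitude to each material's emissive blue channel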
analyser1 = new THREE.AudioAnalyser(sound1, 32);
analyser2 = new THREE.AudioAnalyser(sound2, 32);
analyser3 = new THREE.AudioAnalyser(sound3, 32);
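// a non-positional ambient track, heard at the same volume everywhere in the scene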
const sound4 = new THREE.Audio(listener);
sound4.setVolume(0.5);
loader.load("sounds/Project_Utopia.mp3", function(buffer) {
sound4.setBuffer(buffer);
sound4.duration = buffer.duration;
sound4.play();
});
const helper = new THREE.GridHelper(1000, 10, 0x444444, 0x444444);
helper.position.y = 0.1;
scene.add(helper);
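// plain constructor functions expose named numeric properties for lil-gui to bind sliders to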
const SoundControls = function() {
this.master = listener.getMasterVolume();
this.firstSphere = sound1.getVolume();
this.secondSphere = sound2.getVolume();
this.thirdSphere = sound3.getVolume();
this.Ambient = sound4.getVolume();
};
const GeneratorControls = function() {
this.frequency = oscillator.frequency.value;
this.wavetype = oscillator.type.toLowerCase();
};
const gui = new GUI();
const soundControls = new SoundControls();
const generatorControls = new GeneratorControls();
const volumeFolder = gui.addFolder("sound volume");
const generatorFolder = gui.addFolder("sound generator");
volumeFolder.add(soundControls, "master").min(0).max(1).step(0.01).onChange(function() {
listener.setMasterVolume(soundControls.master);
});
volumeFolder.add(soundControls, "firstSphere").min(0).max(1).step(0.01).onChange(function() {
sound1.setVolume(soundControls.firstSphere);
});
volumeFolder.add(soundControls, "secondSphere").min(0).max(1).step(0.01).onChange(function() {
sound2.setVolume(soundControls.secondSphere);
});
volumeFolder.add(soundControls, "thirdSphere").min(0).max(1).step(0.01).onChange(function() {
sound3.setVolume(soundControls.thirdSphere);
});
volumeFolder.add(soundControls, "Ambient").min(0).max(1).step(0.01).onChange(function() {
sound4.setVolume(soundControls.Ambient);
});
volumeFolder.open();
generatorFolder.add(generatorControls, "frequency").min(50).max(5e3).step(1).onChange(function() {
oscillator.frequency.setValueAtTime(
generatorControls.frequency,
listener.context.currentTime
);
});
generatorFolder.add(generatorControls, "wavetype", ["sine", "square", "sawtooth", "triangle"]).onChange(function() {
oscillator.type = generatorControls.wavetype;
});
generatorFolder.open();
renderer = new THREE.WebGLRenderer({ antialias: true, canvas });
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.setAnimationLoop(animate);
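// FirstPersonControls provides the WASD / arrow / mouse navigation promised in the info block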
controls = new FirstPersonControls(camera, renderer.domElement);
controls.movementSpeed = 70;
controls.lookSpeed = 0.05;
controls.lookVertical = false;
window.addEventListener("resize", onWindowResize);
needToDispose(renderer, scene, controls, {
dispose: () => {
sound1.disconnect();
sound2.disconnect();
sound3.disconnect();
sound4.disconnect();
}
});
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
controls.handleResize();
}
function animate() {
const delta = clock.getDelta();
controls.update(delta);
material1.emissive.b = analyser1.getAverageFrequency() / 256;
material2.emissive.b = analyser2.getAverageFrequency() / 256;
material3.emissive.b = analyser3.getAverageFrequency() / 256;
renderer.render(scene, camera);
}
init();
}
};
export {
exampleInfo as default
};
```

```ts
import { Loader, TypedArray } from 'three';
/**
 * Multi-platform wrapper type for the official three.js examples.
 */
export interface OfficialExampleInfo extends MiniProgramMeta {
/** Example name (kept identical to the official site) */
name: string;
/** Main entry point */
init: (context: LoadContext) => void;
}
export interface LoadContext {
window: EventTarget & { innerWidth: number; innerHeight: number; devicePixelRatio: number };
/** HTMLCanvasElement */
canvas: any;
/** https://www.npmjs.com/package/lil-gui */
GUI: any;
/**
 * https://www.npmjs.com/package/stats.js
 * Other supported versions may also be used.
 */
Stats: any;
/** Collects objects that need to be disposed (the official examples do not handle this) */
needToDispose: (...objs: any[]) => void | ((fromFn: () => any[]) => void);
/** General animation-loop wrapper based on requestAnimationFrame */
useFrame(animateFunc: (/** ms */ delta: number) => void): { cancel: () => void };
/** Shows the loading modal */
requestLoading(text?: string): Promise<void>;
/** Hides the loading modal */
cancelLoading(): void;
/** General helper for saving a file */
saveFile(
fileName: string,
data: ArrayBuffer | TypedArray | DataView | string
): Promise<string | null>;
/** Resource paths used when an example needs the DRACO decoder */
DecoderPath: {
GLTF: string;
STANDARD: string;
};
/** Prepends the CDN prefix to a resource path */
withCDNPrefix(path: string): string;
/**
 * In the mini program, use: import { VideoTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/VideoTexture.js';
 * On the web, THREE.VideoTexture can be used directly.
 */
getVideoTexture(videoOptions: VideoOptions): Promise<[{ isVideoTexture: true }, video: any]>;
/**
 * In the mini program, use: import { CameraTexture } from '@minisheep/three-platform-adapter/override/jsm/textures/CameraTexture.js';
 * On the web, see the example webgl_materials_video_webcam.
 */
getCameraTexture(): { isVideoTexture: true };
/** Dynamically updates a placeholder in `info` */
bindInfoText(template: `$${string}$`, initValue?: string): { value: string };
/** Event callback for the split-screen slider control */
onSlideStart(handle: () => void): void;
/** Event callback for the split-screen slider control */
onSlideEnd(handle: () => void): void;
/** Event callback for the split-screen slider control */
onSlideChange(handle: (offset: number, boxSize: number) => void): void;
}
export type VideoOptions = {
src: string;
/** Equivalent to HTMLVideoElement.videoWidth (unavailable in the mini program) */
width: number;
/** Equivalent to HTMLVideoElement.videoHeight (unavailable in the mini program) */
height: number;
loop?: boolean;
autoplay?: boolean;
muted?: boolean;
};
/** Configuration used by the example mini program */
export interface MiniProgramMeta {
/** Used to collect loading-related statistics */
useLoaders: Record<string, Loader>;
/** General info lines */
info: TagItem[][];
/** Special info panel */
infoPanel?: {
left?: [string, string][];
right?: [string, string][];
};
/** Split-screen slider control configuration */
needSlider?: {
/** Direction */
direction?: 'horizontal' | 'vertical';
/** Initial offset, 0-100 */
initPosition?: number;
};
/** Whether the on-screen joystick control is needed */
needArrowControls?: boolean;
/** Canvas type required by default */
canvasType?: '2d' | 'webgl' | 'webgl2';
/** Canvas style required to keep the effect consistent */
canvasStyle?: {
bgColor?: string;
width?: number | string;
height?: number | string;
};
/** Some examples need to show a confirmation prompt before loading */
initAfterConfirm?: {
/**
 * Prompt type
 * @default 'default'
 */
type?: 'warning' | 'default';
text: string[];
};
}
export interface BaseTag<T extends string> {
tag: T;
content: string;
}
export interface ATag extends BaseTag<'a'> {
link: string;
}
export type TextTag = BaseTag<'text'>;
export type TagItem = TextTag | ATag;
```
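
For orientation, here is a minimal sketch of an `OfficialExampleInfo` that exercises the `LoadContext` helpers above. The example name and scene content are hypothetical, and `useFrame` is used in place of `renderer.setAnimationLoop`:

```js
import * as THREE from "three";
/** @type {import("@minisheeep/mp-three-examples").OfficialExampleInfo} */
const exampleInfo = {
  name: "webgl_minimal_sketch", // hypothetical name, not an official example
  useLoaders: [],
  info: [[{ tag: "text", content: "minimal sketch" }]],
  init: ({ window, canvas, needToDispose, useFrame }) => {
    const renderer = new THREE.WebGLRenderer({ antialias: true, canvas });
    renderer.setPixelRatio(window.devicePixelRatio);
    renderer.setSize(window.innerWidth, window.innerHeight);
    const scene = new THREE.Scene();
    const camera = new THREE.PerspectiveCamera(50, window.innerWidth / window.innerHeight, 1, 100);
    camera.position.z = 5;
    const mesh = new THREE.Mesh(new THREE.BoxGeometry(), new THREE.MeshNormalMaterial());
    scene.add(mesh);
    // useFrame drives the render loop; per the type definition above, delta is in milliseconds
    useFrame((delta) => {
      mesh.rotation.y += delta / 1000;
      renderer.render(scene, camera);
    });
    // register resources so the host can clean them up when the example closes
    needToDispose(renderer, scene);
  }
};
export { exampleInfo as default };
```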