diff --git a/public/index.html b/public/index.html
index b5e9c3a876d8ae1bf1c7c1307557ff92837c727a..283d0c3308302cb9e3ec75cf60de87964c8f0ed0 100644
--- a/public/index.html
+++ b/public/index.html
@@ -31,9 +31,10 @@
       <a-entity
         class="head"
         gltf-model="#dodec-avatar-head"
-        position="0 0 0"
         networked-audio-source
-        audio-feedback
+        networked-audio-analyser
+        matcolor-audio-feedback="objectName: DodecAvatar_Head_0"
+        scale-audio-feedback
         avatar-customization
       ></a-entity>
     </script>
@@ -77,7 +78,14 @@
         networked="template:#hand-template;showLocalTemplate:false;"
       >
         <a-entity id="watch" gltf-model="assets/hud/watch.gltf" position="0 0.0015 0.147" rotation="3.5 0 0">
-          <a-circle mute-state-indicator="" position="0 0.023 0" rotation="-90 0 0" scale="0.04 0.04 0.04" material="color:#d8eece;shader:flat"></a-circle>
+          <a-circle
+            mute-state-indicator
+            scale-audio-feedback="analyserSrc: #head; minScale: 0.035; maxScale: 0.08;"
+            position="0 0.023 0"
+            rotation="-90 0 0"
+            scale="0.04 0.04 0.04"
+            material="color:#d8eece;shader:flat">
+          </a-circle>
         </a-entity>
       </a-entity>
 
diff --git a/src/components/audio-feedback.js b/src/components/audio-feedback.js
index cee9abe0946b0b5c3e950879ae9c45de466b36b2..ee4b802be1ae2e687c2ee9e3b7869a792bac5faa 100644
--- a/src/components/audio-feedback.js
+++ b/src/components/audio-feedback.js
@@ -1,32 +1,34 @@
-// @TODO break this up into 2 components
-// 1. a component that just adds an analyser node and reads audio data
-// 2. a component that uses the other component's data to apply some effect
-// Communication either happens by dispatching some event or by component 2 directly reading componet 1
-AFRAME.registerComponent("audio-feedback", {
-  schema: {
-    audioSource: { type: "selector" }
-  },
-  init: function() {
-    // @TODO using an arbitrary timeout here which is very bad. Needs to wait on model loading and audio source being connected
-    setTimeout(() => {
-      const audioComponent = (this.data.audioSource || this.el).components[
-        "networked-audio-source"
-      ];
+const waitForConnected = function() {
+  return new Promise(resolve => {
+    NAF.clientId
+      ? resolve()
+      : document.body.addEventListener("connected", resolve);
+  });
+};
 
-      const audioSource = audioComponent && audioComponent.sound;
-      if (!audioSource) return;
-
-      this.mat = this.el.object3D.getObjectByName(
-        "DodecAvatar_Head_0"
-      ).material;
-
-      this.analyser = audioSource.context.createAnalyser();
-      this.levels = new Uint8Array(this.analyser.frequencyBinCount);
-      audioSource.disconnect();
-      audioSource.setFilter(this.analyser);
-      audioSource.connect();
-      console.log(audioSource.filters, audioSource.isPlaying);
-    }, 5000);
+AFRAME.registerComponent("networked-audio-analyser", {
+  schema: {},
+  init() {
+    waitForConnected()
+      .then(() => {
+        const networkedEl = NAF.utils.getNetworkedEntity(this.el);
+        if (!networkedEl) {
+          return Promise.reject(
+            "Audio Analyzer must be added on a node, or a child of a node, with the `networked` component."
+          );
+        }
+        const ownerId = networkedEl.components.networked.data.owner;
+        console.log("audio Analyser for " + ownerId);
+        return NAF.connection.adapter.getMediaStream(ownerId);
+      })
+      .then(stream => {
+        const ctx = THREE.AudioContext.getContext();
+        const source = ctx.createMediaStreamSource(stream);
+        this.analyser = ctx.createAnalyser();
+        this.levels = new Uint8Array(this.analyser.frequencyBinCount);
+        source.connect(this.analyser);
+        console.log(source, this.analyser);
+      });
   },
 
   tick: function() {
@@ -39,7 +41,80 @@ AFRAME.registerComponent("audio-feedback", {
       sum += this.levels[i];
     }
     this.volume = sum / this.levels.length;
-    this.mat.color.setScalar(1 + this.volume / 255 * 2);
-    this.el.object3D.scale.setScalar(1 + this.volume / 255);
+    this.el.emit("audioFrequencyChange", {
+      volume: this.volume,
+      levels: this.levels
+    });
+  }
+});
+
+AFRAME.registerComponent("matcolor-audio-feedback", {
+  schema: {
+    analyserSrc: { type: "selector" },
+    objectName: { type: "string" }
+  },
+  init: function() {
+    this.onAudioFrequencyChange = this.onAudioFrequencyChange.bind(this);
+
+    this.el.addEventListener("model-loaded", () => {
+      console.log(this.data.objectName);
+      this.mat = this.el.object3D.getObjectByName(
+        this.data.objectName
+      ).material;
+      console.log("mat", this.mat);
+    });
+  },
+
+  play() {
+    (this.data.analyserSrc || this.el).addEventListener(
+      "audioFrequencyChange",
+      this.onAudioFrequencyChange
+    );
+  },
+
+  pause() {
+    (this.data.analyserSrc || this.el).removeEventListener(
+      "audioFrequencyChange",
+      this.onAudioFrequencyChange
+    );
+  },
+
+  onAudioFrequencyChange(e) {
+    if (!this.mat) return;
+    this.mat.color.setScalar(1 + e.detail.volume / 255 * 2);
+  }
+});
+
+AFRAME.registerComponent("scale-audio-feedback", {
+  schema: {
+    analyserSrc: { type: "selector" },
+
+    minScale: { default: 1 },
+    maxScale: { default: 2 }
+  },
+
+  init() {
+    this.onAudioFrequencyChange = this.onAudioFrequencyChange.bind(this);
+  },
+
+  play() {
+    (this.data.analyserSrc || this.el).addEventListener(
+      "audioFrequencyChange",
+      this.onAudioFrequencyChange
+    );
+  },
+
+  pause() {
+    (this.data.analyserSrc || this.el).removeEventListener(
+      "audioFrequencyChange",
+      this.onAudioFrequencyChange
+    );
+  },
+
+  onAudioFrequencyChange(e) {
+    const { minScale, maxScale } = this.data;
+    this.el.object3D.scale.setScalar(
+      minScale + (maxScale - minScale) * e.detail.volume / 255
+    );
   }
 });
diff --git a/src/components/mute-state-indicator.js b/src/components/mute-state-indicator.js
index 00f61159c0e26ae706f51c4e48b8dbb02957b555..3d1597f93f2ca1df9fd65613c451353bea620121 100644
--- a/src/components/mute-state-indicator.js
+++ b/src/components/mute-state-indicator.js
@@ -12,8 +12,8 @@ AFRAME.registerComponent("mute-state-indicator", {
   },
 
   pause() {
-    this.el.sceneEl.addEventListener("stateadded", this.onStateToggled);
-    this.el.sceneEl.addEventListener("stateremoved", this.onStateToggled);
+    this.el.sceneEl.removeEventListener("stateadded", this.onStateToggled);
+    this.el.sceneEl.removeEventListener("stateremoved", this.onStateToggled);
   },
 
   onStateToggled(e) {