diff --git a/src/assets/stylesheets/entry.scss b/src/assets/stylesheets/entry.scss
index d30ffc98bc332d450d24dc29f3e2ebbcf5d21772..2915f56da8f98f3fc79bda2b147d3750179c5dbc 100644
--- a/src/assets/stylesheets/entry.scss
+++ b/src/assets/stylesheets/entry.scss
@@ -18,6 +18,29 @@
   flex-direction: column;
   flex: 10 1 auto;
   justify-content: center;
+
+  &__screen-sharing {
+    font-size: 1.4em;
+    margin-left: 2.95em;
+    margin-top: 0.6em;
+  }
+
+  &__screen-sharing-checkbox {
+    appearance: none;
+    -moz-appearance: none;
+    -webkit-appearance: none;
+    width: 2em;
+    height: 2em;
+    border: 3px solid white;
+    border-radius: 9px;
+    vertical-align: sub;
+    margin: 0 0.6em
+  }
+  &__screen-sharing-checkbox:checked {
+    border: 9px double white;
+    outline: 9px solid white;
+    outline-offset: -18px;
+  }
 }
 
 .entry-panel__secondary {
diff --git a/src/assets/translations.data.json b/src/assets/translations.data.json
index e10a95ae40bb533716f17b93f15d0f196a6de13e..04b25e90c782c04b60860524f98be0600627fcfe 100644
--- a/src/assets/translations.data.json
+++ b/src/assets/translations.data.json
@@ -12,6 +12,7 @@
   "entry.daydream-prefix": "Enter on ",
   "entry.daydream-medium": "Daydream",
   "entry.daydream-via-chrome": "Using Google Chrome",
+  "entry.enable-screen-sharing": "Share my desktop",
   "profile.save": "SAVE",
   "profile.display_name.validation_warning": "Alphanumerics and hyphens. At least 3 characters, no more than 32",
   "profile.header": "Your identity",
diff --git a/src/components/networked-video-player.js b/src/components/networked-video-player.js
index 03cfba4f6d7a835ac825804724a4109d16690cdb..51adcf2f7f2ddd6d0023c54442e03627ea952c70 100644
--- a/src/components/networked-video-player.js
+++ b/src/components/networked-video-player.js
@@ -1,3 +1,5 @@
+import queryString from "query-string";
+
 import styles from "./networked-video-player.css";
 
 const nafConnected = function() {
@@ -9,14 +11,6 @@ const nafConnected = function() {
 AFRAME.registerComponent("networked-video-player", {
   schema: {},
   async init() {
-    let container = document.getElementById("nvp-debug-container");
-    if (!container) {
-      container = document.createElement("div");
-      container.id = "nvp-debug-container";
-      container.classList.add(styles.container);
-      document.body.appendChild(container);
-    }
-
     await nafConnected();
 
     const networkedEl = await NAF.utils.getNetworkedEntity(this.el);
@@ -25,6 +19,24 @@ AFRAME.registerComponent("networked-video-player", {
     }
 
     const ownerId = networkedEl.components.networked.data.owner;
+
+    const qs = queryString.parse(location.search);
+    const rejectScreenShares = qs.accept_screen_shares === undefined;
+    if (ownerId !== NAF.clientId && rejectScreenShares) {
+      // Toggle material visibility since object visibility is network-synced
+      // TODO: There ought to be a better way to disable network syncs on a remote entity
+      this.el.setAttribute("material", {visible: false});
+      return;
+    }
+
+    let container = document.getElementById("nvp-debug-container");
+    if (!container) {
+      container = document.createElement("div");
+      container.id = "nvp-debug-container";
+      container.classList.add(styles.container);
+      document.body.appendChild(container);
+    }
+
     const stream = await NAF.connection.adapter.getMediaStream(ownerId, "video");
     if (!stream) {
       return;
diff --git a/src/react-components/ui-root.js b/src/react-components/ui-root.js
index f70ecf30f56a12ffb0ee9914dda1ab91d374243a..26ffdfbf3d55227218ea4c36f3752fbe888461df 100644
--- a/src/react-components/ui-root.js
+++ b/src/react-components/ui-root.js
@@ -41,11 +41,6 @@ async function grantedMicLabels() {
   return mediaDevices.filter(d => d.label && d.kind === "audioinput").map(d => d.label);
 }
 
-async function hasGrantedMicPermissions() {
-  const micLabels = await grantedMicLabels();
-  return micLabels.length > 0;
-}
-
 // This is a list of regexes that match the microphone labels of HMDs.
 //
 // If entering VR mode, and if any of these regexes match an audio device,
@@ -64,6 +59,7 @@ class UIRoot extends Component {
     concurrentLoadDetector: PropTypes.object,
     disableAutoExitOnConcurrentLoad: PropTypes.bool,
     forcedVREntryType: PropTypes.string,
+    enableScreenSharing: PropTypes.bool,
     store: PropTypes.object,
     scene: PropTypes.object
   };
@@ -74,7 +70,10 @@
     enterInVR: false,
     shareScreen: false,
+    requestedScreen: false,
     mediaStream: null,
+    videoTrack: null,
+    audioTrack: null,
 
     toneInterval: null,
     tonePlaying: false,
 
@@ -209,12 +208,27 @@
     });
   };
 
+  hasGrantedMicPermissions = async () => {
+    if (this.state.requestedScreen) {
+      // There is no way to tell if you've granted mic permissions in a previous session if we've
+      // already prompted for screen sharing permissions, so we have to assume that we've never granted permissions.
+      // Fortunately, if you *have* granted permissions permanently, there won't be a second browser prompt, but we
+      // can't determine that beforehand.
+      // See https://bugzilla.mozilla.org/show_bug.cgi?id=1449783 for a potential solution in the future.
+      return false;
+    }
+    else {
+      // If we haven't requested the screen in this session, check if we've granted permissions in a previous session.
+      return (await grantedMicLabels()).length > 0;
+    }
+  }
+
   performDirectEntryFlow = async enterInVR => {
     this.startTestTone();
     this.setState({ enterInVR });
 
-    const hasGrantedMic = await hasGrantedMicPermissions();
+    const hasGrantedMic = await this.hasGrantedMicPermissions();
 
     if (hasGrantedMic) {
       await this.setMediaStreamToDefault();
@@ -268,37 +282,60 @@
     }
   };
 
-  mediaVideoConstraint = () => {
-    return this.state.shareScreen ? { mediaSource: "screen", height: 720, frameRate: 30 } : false;
-  };
-
   micDeviceChanged = async ev => {
-    const constraints = { audio: { deviceId: { exact: [ev.target.value] } }, video: this.mediaVideoConstraint() };
-    await this.setupNewMediaStream(constraints);
-  };
+    const constraints = { audio: { deviceId: { exact: [ev.target.value] } } };
+    await this.fetchAudioTrack(constraints);
+    await this.setupNewMediaStream();
+  }
 
   setMediaStreamToDefault = async () => {
-    await this.setupNewMediaStream({ audio: true, video: false });
-  };
-
-  setupNewMediaStream = async constraints => {
-    const AudioContext = window.AudioContext || window.webkitAudioContext;
-    const audioContext = new AudioContext();
+    await this.fetchAudioTrack({ audio: true });
+    await this.setupNewMediaStream();
+  }
 
-    if (this.state.mediaStream) {
-      clearInterval(this.state.micUpdateInterval);
+  setStateAndRequestScreen = async e => {
+    const checked = e.target.checked;
+    await this.setState({ requestedScreen: true, shareScreen: checked });
+    if (checked) {
+      this.fetchVideoTrack({ video: {
+        mediaSource: "screen",
+        // Work around BMO 1449832 by calculating the width. This will break for multi monitors if you share anything
+        // other than your current monitor that has a different aspect ratio.
+        width: screen.width / screen.height * 720,
+        height: 720,
+        frameRate: 30
+      } });
+    }
+    else {
+      this.setState({ videoTrack: null });
+    }
+  }
 
-      const previousStream = this.state.mediaStream;
+  fetchVideoTrack = async constraints => {
+    const mediaStream = await navigator.mediaDevices.getUserMedia(constraints);
+    this.setState({ videoTrack: mediaStream.getVideoTracks()[0] });
+  }
 
-      for (const tracks of [previousStream.getAudioTracks(), previousStream.getVideoTracks()]) {
-        for (const track of tracks) {
-          track.stop();
-        }
-      }
+  fetchAudioTrack = async constraints => {
+    if (this.state.audioTrack) {
+      this.state.audioTrack.stop();
     }
-
     const mediaStream = await navigator.mediaDevices.getUserMedia(constraints);
+    this.setState({ audioTrack: mediaStream.getAudioTracks()[0] });
+  }
+
+  setupNewMediaStream = async constraints => {
+    const mediaStream = new MediaStream();
+
+    // we should definitely have an audioTrack at this point.
+    mediaStream.addTrack(this.state.audioTrack);
+    if (this.state.videoTrack) {
+      mediaStream.addTrack(this.state.videoTrack);
+    }
+
+    const AudioContext = window.AudioContext || window.webkitAudioContext;
+    const audioContext = new AudioContext();
 
     const source = audioContext.createMediaStreamSource(mediaStream);
     const analyzer = audioContext.createAnalyser();
     const levels = new Uint8Array(analyzer.fftSize);
@@ -343,8 +380,10 @@
 
   fetchMicDevices = async () => {
     const mediaDevices = await navigator.mediaDevices.enumerateDevices();
-    this.setState({
-      micDevices: mediaDevices.filter(d => d.kind === "audioinput").map(d => ({ deviceId: d.deviceId, label: d.label }))
+    this.setState({
+      micDevices: mediaDevices.
+        filter(d => d.kind === "audioinput").
+        map(d => ({ deviceId: d.deviceId, label: d.label }))
     });
   };
 
@@ -427,29 +466,47 @@
     const daydreamMaybeSubtitle = messages["entry.daydream-via-chrome"];
 
-    const entryPanel =
+    // Only show this in desktop firefox since other browsers/platforms will ignore the "screen" media constraint and
+    // will attempt to share your webcam instead!
+    const screenSharingCheckbox = (
+      this.props.enableScreenSharing &&
+      !mobiledetect.mobile() &&
+      /firefox/i.test(navigator.userAgent) &&
+      (
+        <label className="entry-panel__screen-sharing">
+          <input className="entry-panel__screen-sharing-checkbox" type="checkbox"
+            value={this.state.shareScreen}
+            onChange={this.setStateAndRequestScreen}
+          />
+          <FormattedMessage id="entry.enable-screen-sharing" />
+        </label>
+      )
+    );
+
+    const entryPanel =
       this.state.entryStep === ENTRY_STEPS.start ? (
        <div className="entry-panel">
          <TwoDEntryButton onClick={this.enter2D} />
-          {this.state.availableVREntryTypes.generic !== VR_DEVICE_AVAILABILITY.no && (
-            <GenericEntryButton onClick={this.enterVR} />
+          { this.state.availableVREntryTypes.generic !== VR_DEVICE_AVAILABILITY.no && (
+              <GenericEntryButton onClick={this.enterVR} />
          )}
-          {this.state.availableVREntryTypes.gearvr !== VR_DEVICE_AVAILABILITY.no && (
-            <GearVREntryButton onClick={this.enterGearVR} />
+          { this.state.availableVREntryTypes.gearvr !== VR_DEVICE_AVAILABILITY.no && (
+              <GearVREntryButton onClick={this.enterGearVR} />
          )}
-          {this.state.availableVREntryTypes.daydream !== VR_DEVICE_AVAILABILITY.no && (
+          { this.state.availableVREntryTypes.daydream !== VR_DEVICE_AVAILABILITY.no && (
            <DaydreamEntryButton
              onClick={this.enterDaydream}
              subtitle={
-                this.state.availableVREntryTypes.daydream == VR_DEVICE_AVAILABILITY.maybe ? daydreamMaybeSubtitle : ""
+              this.state.availableVREntryTypes.daydream == VR_DEVICE_AVAILABILITY.maybe ? daydreamMaybeSubtitle : ""
              }
-            />
+           />
          )}
          {this.state.availableVREntryTypes.cardboard !== VR_DEVICE_AVAILABILITY.no && (
            <div className="entry-panel__secondary" onClick={this.enterVR}>
              <FormattedMessage id="entry.cardboard" />
            </div>
          )}
+          { screenSharingCheckbox }
        </div>
      ) : null;
diff --git a/src/room.html b/src/room.html
index 2984baa1ed1260320064668d631894113441ee81..132e6af23c42d61553fc1c78c43ff0afc0cf188b 100644
--- a/src/room.html
+++ b/src/room.html
@@ -49,7 +49,7 @@
 
     <!-- Templates -->
     <template id="video-template">
-      <a-entity class="video" geometry="primitive: plane;" material="side: double" networked-video-player></a-entity>
+      <a-entity class="video" geometry="primitive: plane;" material="side: double; shader: flat;" networked-video-player></a-entity>
     </template>
 
     <template id="remote-avatar-template">
diff --git a/src/room.js b/src/room.js
index 576f61d8e59bb557e8b6e2ac847c9c98e5f7c888..909e8977f7bd4ce6d30e6ce4ddfc183a7e685c3b 100644
--- a/src/room.js
+++ b/src/room.js
@@ -86,6 +86,12 @@
 import { generateDefaultProfile } from "./utils/identity.js";
 import { getAvailableVREntryTypes } from "./utils/vr-caps-detect.js";
 import ConcurrentLoadDetector from "./utils/concurrent-load-detector.js";
 
+function qsTruthy(param) {
+  const val = qs[param];
+  // if the param exists but is not set (e.g. "?foo&bar"), its value is null.
+  return val === null || /1|on|true/i.test(val);
+}
+
 registerTelemetry();
 AFRAME.registerInputBehaviour("vive_trackpad_dpad4", vive_trackpad_dpad4);
@@ -101,32 +107,6 @@ concurrentLoadDetector.start();
 // Always layer in any new default profile bits
 store.update({ profile: { ...generateDefaultProfile(), ...(store.state.profile || {}) } });
 
-async function shareMedia(audio, video) {
-  const constraints = {
-    audio: !!audio,
-    video: video ? { mediaSource: "screen", height: 720, frameRate: 30 } : false
-  };
-  const mediaStream = await navigator.mediaDevices.getUserMedia(constraints);
-  NAF.connection.adapter.setLocalMediaStream(mediaStream);
-
-  const id = `${NAF.clientId}-screen`;
-  let entity = document.getElementById(id);
-  if (entity) {
-    entity.setAttribute("visible", !!video);
-  } else if (video) {
-    const sceneEl = document.querySelector("a-scene");
-    entity = document.createElement("a-entity");
-    entity.id = id;
-    entity.setAttribute("offset-relative-to", {
-      target: "#player-camera",
-      offset: "0 0 -2",
-      on: "action_share_screen"
-    });
-    entity.setAttribute("networked", { template: "#video-template" });
-    sceneEl.appendChild(entity);
-  }
-}
-
 async function exitScene() {
   const scene = document.querySelector("a-scene");
   scene.renderer.animate(null); // Stop animation loop, TODO A-Frame should do this
@@ -163,36 +143,50 @@
     audio: true,
     debug: true,
     connectOnLoad: false,
-    room: qs.room && !isNaN(parseInt(qs.room)) ? parseInt(qs.room) : 1,
+    room: qs.room && !isNaN(parseInt(qs.room, 10)) ? parseInt(qs.room, 10) : 1,
     serverURL: process.env.JANUS_SERVER
   });
 
-  if (!qs.stats || !/off|false|0/.test(qs.stats)) {
+  if (!qsTruthy("no_stats")) {
     scene.setAttribute("stats", true);
   }
 
-  if (isMobile || qs.mobile) {
+  if (isMobile || qsTruthy("mobile")) {
     playerRig.setAttribute("virtual-gamepad-controls", {});
   }
 
   updatePlayerInfoFromStore();
   store.addEventListener("statechanged", updatePlayerInfoFromStore);
 
-  const avatarScale = parseInt(qs.avatarScale, 10);
+  const avatarScale = parseInt(qs.avatar_scale, 10);
 
   if (avatarScale) {
     playerRig.setAttribute("scale", { x: avatarScale, y: avatarScale, z: avatarScale });
   }
 
-  let sharingScreen = false;
+  const videoTracks = mediaStream.getVideoTracks();
+  let sharingScreen = videoTracks.length > 0;
+
+  const screenEntityId = `${NAF.clientId}-screen`;
+  let screenEntity = document.getElementById(screenEntityId);
 
-  // TODO remove
   scene.addEventListener("action_share_screen", () => {
     sharingScreen = !sharingScreen;
-    shareMedia(true, sharingScreen);
+    if (sharingScreen) {
+      for (const track of videoTracks) {
+        mediaStream.addTrack(track);
+      }
+    }
+    else {
+      for (const track of mediaStream.getVideoTracks()) {
+        mediaStream.removeTrack(track);
+      }
+    }
+    NAF.connection.adapter.setLocalMediaStream(mediaStream);
+    screenEntity.setAttribute("visible", sharingScreen);
   });
 
-  if (qs.offline) {
+  if (qsTruthy("offline")) {
     onConnect();
   } else {
     document.body.addEventListener("connected", onConnect);
@@ -202,23 +196,19 @@
   if (mediaStream) {
     NAF.connection.adapter.setLocalMediaStream(mediaStream);
 
-    const hasVideo = !!(mediaStream.getVideoTracks().length > 0);
-
-    const id = `${NAF.clientId}-screen`;
-    let entity = document.getElementById(id);
-    if (entity) {
-      entity.setAttribute("visible", hasVideo);
-    } else if (hasVideo) {
+    if (screenEntity) {
+      screenEntity.setAttribute("visible", sharingScreen);
+    } else if (sharingScreen) {
       const sceneEl = document.querySelector("a-scene");
-      entity = document.createElement("a-entity");
-      entity.id = id;
-      entity.setAttribute("offset-relative-to", {
-        target: "#head",
+      screenEntity = document.createElement("a-entity");
+      screenEntity.id = screenEntityId;
+      screenEntity.setAttribute("offset-relative-to", {
+        target: "#player-camera",
         offset: "0 0 -2",
         on: "action_share_screen"
       });
-      entity.setAttribute("networked", { template: "#video-template" });
-      sceneEl.appendChild(entity);
+      screenEntity.setAttribute("networked", { template: "#video-template" });
+      sceneEl.appendChild(screenEntity);
     }
   }
 }
@@ -228,12 +218,9 @@ function onConnect() {}
 
 function mountUI(scene) {
   const qs = queryString.parse(location.search);
-  const disableAutoExitOnConcurrentLoad = qs.allow_multi === "true";
-  let forcedVREntryType = null;
-
-  if (qs.vr_entry_type) {
-    forcedVREntryType = qs.vr_entry_type;
-  }
+  const disableAutoExitOnConcurrentLoad = qsTruthy("allow_multi");
+  const forcedVREntryType = qs.vr_entry_type || null;
+  const enableScreenSharing = qsTruthy("enable_screen_sharing");
 
   const uiRoot = ReactDOM.render(
     <UIRoot
@@ -244,6 +231,7 @@
         concurrentLoadDetector,
         disableAutoExitOnConcurrentLoad,
         forcedVREntryType,
+        enableScreenSharing,
        store
      }}
    />,
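
For reference, a minimal sketch of how the new query-string flags are interpreted by the qsTruthy helper added in src/room.js above, assuming the same query-string package the diff already imports; the example URL and the logged values are illustrative only and not part of the commit:

    import queryString from "query-string";

    // Illustrative room URL: a bare flag such as "?enable_screen_sharing" parses to null.
    const qs = queryString.parse("?enable_screen_sharing&no_stats=false&avatar_scale=2");

    // Same logic as the qsTruthy helper introduced in src/room.js.
    function qsTruthy(param) {
      const val = qs[param];
      // A param that is present but unset (e.g. "?foo&bar") has a null value and counts as truthy.
      return val === null || /1|on|true/i.test(val);
    }

    console.log(qsTruthy("enable_screen_sharing")); // true  (flag present with no value)
    console.log(qsTruthy("no_stats"));              // false ("false" matches none of 1|on|true)
    console.log(qsTruthy("allow_multi"));           // false (param absent)
    console.log(parseInt(qs.avatar_scale, 10));     // 2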