/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
import { XPCOMUtils } from "resource://gre/modules/XPCOMUtils.sys.mjs";
import { AppConstants } from "resource://gre/modules/AppConstants.sys.mjs";

// Holder object for lazily-resolved services; the getter below populates
// lazy.MediaManagerService on first access (used by getTabStateForContentWindow).
const lazy = {};

XPCOMUtils.defineLazyServiceGetter(
  lazy,
  "MediaManagerService",
  "@mozilla.org/mediaManagerService;1",
  "nsIMediaManagerService"
);

// Chrome URL of the main browser window. Used below to ignore capture
// notifications that originate from the browser UI itself (previews).
const kBrowserURL = AppConstants.BROWSER_CHROME_URL;
/**
 * GlobalMuteListener is a process-global object that listens for changes to
 * the global mute state of the camera and microphone. When it notices a
 * change in that state, it tells the underlying platform code to mute or
 * unmute those devices.
 */
const GlobalMuteListener = {
  // True once init() has run; prevents double-registration of the listener.
  _initted: false,

  /**
   * Initializes the listener if it hasn't been already. This will also
   * ensure that the microphone and camera are initially in the right
   * muting state.
   */
  init() {
    if (!this._initted) {
      Services.cpmm.sharedData.addEventListener("change", this);

      this._updateCameraMuteState();
      this._updateMicrophoneMuteState();

      this._initted = true;
    }
  },

  // "change" events fired by Services.cpmm.sharedData when shared keys update.
  handleEvent(event) {
    if (event.changedKeys.includes("WebRTC:GlobalCameraMute")) {
      this._updateCameraMuteState();
    }
    if (event.changedKeys.includes("WebRTC:GlobalMicrophoneMute")) {
      this._updateMicrophoneMuteState();
    }
  },

  // Reads the shared camera-mute flag and notifies the platform accordingly.
  _updateCameraMuteState() {
    let shouldMute = Services.cpmm.sharedData.get("WebRTC:GlobalCameraMute");
    let topic = shouldMute
      ? "getUserMedia:muteVideo"
      : "getUserMedia:unmuteVideo";
    Services.obs.notifyObservers(null, topic);
  },

  // Reads the shared microphone-mute flag and notifies the platform accordingly.
  _updateMicrophoneMuteState() {
    let shouldMute = Services.cpmm.sharedData.get(
      "WebRTC:GlobalMicrophoneMute"
    );
    let topic = shouldMute
      ? "getUserMedia:muteAudio"
      : "getUserMedia:unmuteAudio";
    Services.obs.notifyObservers(null, topic);
  },
};
/**
 * Content-process actor for WebRTC permission prompting and sharing
 * indicators. Platform observer notifications (getUserMedia / PeerConnection
 * requests, device events) are forwarded to the parent via async messages,
 * and parent decisions (allow / deny / mute / revoke) are applied back onto
 * the platform through the observer service.
 */
export class WebRTCChild extends JSWindowActorChild {
  actorCreated() {
    // The user might request that DOM notifications be silenced
    // when sharing the screen. There doesn't seem to be a great
    // way of storing that state in any of the objects going into
    // the WebRTC API or coming out via the observer notification
    // service, so we store it here on the actor.
    //
    // If the user chooses to silence notifications during screen
    // share, this will get set to true.
    this.suppressNotifications = false;
  }

  // Called only for 'unload' to remove pending gUM prompts in reloaded frames.
  static handleEvent(aEvent) {
    let contentWindow = aEvent.target.defaultView;
    let actor = getActorForWindow(contentWindow);
    if (actor) {
      for (let key of contentWindow.pendingGetUserMediaRequests.keys()) {
        actor.sendAsyncMessage("webrtc:CancelRequest", key);
      }
      for (let key of contentWindow.pendingPeerConnectionRequests.keys()) {
        actor.sendAsyncMessage("rtcpeer:CancelRequest", key);
      }
    }
  }

  // This observer is called from BrowserProcessChild to avoid
  // loading this .jsm when WebRTC is not in use.
  static observe(aSubject, aTopic, aData) {
    switch (aTopic) {
      case "getUserMedia:request":
        handleGUMRequest(aSubject, aTopic, aData);
        break;
      case "recording-device-stopped":
        handleGUMStop(aSubject, aTopic, aData);
        break;
      case "PeerConnection:request":
        handlePCRequest(aSubject, aTopic, aData);
        break;
      case "recording-device-events":
        updateIndicators(aSubject, aTopic, aData);
        break;
      case "recording-window-ended":
        removeBrowserSpecificIndicator(aSubject, aTopic, aData);
        break;
    }
  }

  // Messages sent from the parent-side WebRTC actor with the user's decision.
  receiveMessage(aMessage) {
    switch (aMessage.name) {
      case "rtcpeer:Allow":
      case "rtcpeer:Deny": {
        let callID = aMessage.data.callID;
        let contentWindow = Services.wm.getOuterWindowWithId(
          aMessage.data.windowID
        );
        forgetPCRequest(contentWindow, callID);
        let topic =
          aMessage.name == "rtcpeer:Allow"
            ? "PeerConnection:response:allow"
            : "PeerConnection:response:deny";
        Services.obs.notifyObservers(null, topic, callID);
        break;
      }
      case "webrtc:Allow": {
        let callID = aMessage.data.callID;
        let contentWindow = Services.wm.getOuterWindowWithId(
          aMessage.data.windowID
        );
        let devices = contentWindow.pendingGetUserMediaRequests.get(callID);
        forgetGUMRequest(contentWindow, callID);

        // Repackage only the devices the user approved (referenced by index
        // into the pending list) into an XPCOM array for the platform.
        let allowedDevices = Cc["@mozilla.org/array;1"].createInstance(
          Ci.nsIMutableArray
        );
        for (let deviceIndex of aMessage.data.devices) {
          allowedDevices.appendElement(devices[deviceIndex]);
        }

        Services.obs.notifyObservers(
          allowedDevices,
          "getUserMedia:response:allow",
          callID
        );

        this.suppressNotifications = !!aMessage.data.suppressNotifications;
        break;
      }
      case "webrtc:Deny":
        denyGUMRequest(aMessage.data);
        break;
      case "webrtc:StopSharing":
        Services.obs.notifyObservers(
          null,
          "getUserMedia:revoke",
          aMessage.data
        );
        break;
      case "webrtc:MuteCamera":
        Services.obs.notifyObservers(
          null,
          "getUserMedia:muteVideo",
          aMessage.data
        );
        break;
      case "webrtc:UnmuteCamera":
        Services.obs.notifyObservers(
          null,
          "getUserMedia:unmuteVideo",
          aMessage.data
        );
        break;
      case "webrtc:MuteMicrophone":
        Services.obs.notifyObservers(
          null,
          "getUserMedia:muteAudio",
          aMessage.data
        );
        break;
      case "webrtc:UnmuteMicrophone":
        Services.obs.notifyObservers(
          null,
          "getUserMedia:unmuteAudio",
          aMessage.data
        );
        break;
    }
  }
}
// Returns the WebRTC actor for the given content window, or null if one
// cannot be obtained.
function getActorForWindow(window) {
  try {
    let windowGlobal = window.windowGlobalChild;
    if (!windowGlobal.isClosed) {
      return windowGlobal.getActor("WebRTC");
    }
  } catch (ex) {
    // There might not be an actor for a parent process chrome URL,
    // and we may not even be allowed to access its windowGlobalChild.
  }
  return null;
}
// Handles "PeerConnection:request" notifications: records the pending request
// on the content window and forwards it to the parent for prompting.
function handlePCRequest(aSubject, aTopic, aData) {
  let { windowID, innerWindowID, callID, isSecure } = aSubject;
  let contentWindow = Services.wm.getOuterWindowWithId(windowID);
  if (!contentWindow.pendingPeerConnectionRequests) {
    setupPendingListsInitially(contentWindow);
  }
  contentWindow.pendingPeerConnectionRequests.add(callID);

  let request = {
    windowID,
    innerWindowID,
    callID,
    documentURI: contentWindow.document.documentURI,
    secure: isSecure,
  };

  let actor = getActorForWindow(contentWindow);
  if (actor) {
    actor.sendAsyncMessage("rtcpeer:Request", request);
  }
}
// Handles "recording-device-stopped": tells the parent that the given raw
// device stopped recording in this window.
function handleGUMStop(aSubject, aTopic, aData) {
  let contentWindow = Services.wm.getOuterWindowWithId(aSubject.windowID);

  let request = {
    windowID: aSubject.windowID,
    rawID: aSubject.rawID,
    mediaSource: aSubject.mediaSource,
  };

  let actor = getActorForWindow(contentWindow);
  if (actor) {
    actor.sendAsyncMessage("webrtc:StopRecording", request);
  }
}
// Handles "getUserMedia:request": syncs global mute state, then hands the
// request off to prompt() for device filtering and parent-side prompting.
function handleGUMRequest(aSubject, aTopic, aData) {
  // Now that a getUserMedia request has been created, we should check
  // to see if we're supposed to have any devices muted. This needs
  // to occur after the getUserMedia request is made, since the global
  // mute state is associated with the GetUserMediaWindowListener, which
  // is only created after a getUserMedia request.
  GlobalMuteListener.init();

  let constraints = aSubject.getConstraints();
  let contentWindow = Services.wm.getOuterWindowWithId(aSubject.windowID);

  prompt(
    aSubject.type,
    contentWindow,
    aSubject.windowID,
    aSubject.callID,
    constraints,
    aSubject.getAudioOutputOptions(),
    aSubject.devices,
    aSubject.isSecure,
    aSubject.isHandlingUserInput
  );
}
// Filters the enumerated devices against the gUM constraints, records the
// pending request, and sends a "webrtc:Request" prompt message to the parent.
// NOTE(review): the function header was reconstructed from the parameter
// names used in the body — confirm against the original file.
function prompt(
  aRequestType,
  aContentWindow,
  aWindowID,
  aCallID,
  aConstraints,
  aAudioOutputOptions,
  aDevices,
  aSecure,
  aIsHandlingUserInput
) {
  // Devices offered to the user; deviceIndex in each deviceObject points
  // back into this list so the parent can reference devices by index.
  let devices = [];

  let audioInputDevices = [];
  let videoInputDevices = [];
  let audioOutputDevices = [];

  // MediaStreamConstraints defines video as 'boolean or MediaTrackConstraints'.
  let video = aConstraints.video || aConstraints.picture;
  let audio = aConstraints.audio;
  let sharingScreen =
    video && typeof video != "boolean" && video.mediaSource != "camera";
  let sharingAudio =
    audio && typeof audio != "boolean" && audio.mediaSource != "microphone";

  const hasInherentConstraints = ({ facingMode, groupId, deviceId }) => {
    const id = [deviceId].flat()[0];
    return facingMode || groupId || (id && id != "default"); // flock workaround
  };
  let hasInherentAudioConstraints =
    audio &&
    !sharingAudio &&
    [audio, ...(audio.advanced || [])].some(hasInherentConstraints);
  let hasInherentVideoConstraints =
    video &&
    !sharingScreen &&
    [video, ...(video.advanced || [])].some(hasInherentConstraints);

  for (let device of aDevices) {
    device = device.QueryInterface(Ci.nsIMediaDevice);
    let deviceObject = {
      name: device.rawName, // unfiltered device name to show to the user
      deviceIndex: devices.length,
      id: device.rawId,
      mediaSource: device.mediaSource,
      canRequestOsLevelPrompt: device.canRequestOsLevelPrompt,
    };
    switch (device.type) {
      case "audioinput":
        // Check that if we got a microphone, we have not requested an audio
        // capture, and if we have requested an audio capture, we are not
        // getting a microphone instead.
        if (audio && (device.mediaSource == "microphone") != sharingAudio) {
          audioInputDevices.push(deviceObject);
          devices.push(device);
        }
        break;
      case "videoinput":
        // Verify that if we got a camera, we haven't requested a screen share,
        // or that if we requested a screen share we aren't getting a camera.
        if (video && (device.mediaSource == "camera") != sharingScreen) {
          if (device.scary) {
            deviceObject.scary = true;
          }
          videoInputDevices.push(deviceObject);
          devices.push(device);
        }
        break;
      case "audiooutput":
        if (aRequestType == "selectaudiooutput") {
          audioOutputDevices.push(deviceObject);
          devices.push(device);
        }
        break;
    }
  }

  let requestTypes = [];
  if (videoInputDevices.length) {
    requestTypes.push(sharingScreen ? "Screen" : "Camera");
  }
  if (audioInputDevices.length) {
    requestTypes.push(sharingAudio ? "AudioCapture" : "Microphone");
  }
  if (audioOutputDevices.length) {
    requestTypes.push("Speaker");
  }

  if (!requestTypes.length) {
    // Device enumeration is done ahead of handleGUMRequest, so we're not
    // responsible for handling the NotFoundError spec case.
    denyGUMRequest({ callID: aCallID });
    return;
  }

  if (!aContentWindow.pendingGetUserMediaRequests) {
    setupPendingListsInitially(aContentWindow);
  }
  aContentWindow.pendingGetUserMediaRequests.set(aCallID, devices);

  // WebRTC prompts have a bunch of special requirements, such as being able to
  // grant two permissions (microphone and camera), selecting devices and showing
  // a screen sharing preview. All this could have probably been baked into
  // nsIContentPermissionRequest prompts, but the team that implemented this back
  // then chose to just build their own prompting mechanism instead.
  //
  // So, what you are looking at here is not a real nsIContentPermissionRequest, but
  // something that looks really similar and will be transmitted to webrtcUI.sys.mjs
  // for showing the prompt.
  // Note that we basically do the permission delegate check in
  // nsIContentPermissionRequest, but because webrtc uses their own prompting
  // system, we should manually apply the delegate policy here. Permission
  // should be delegated using Feature Policy and top principal
  const permDelegateHandler =
    aContentWindow.document.permDelegateHandler.QueryInterface(
      Ci.nsIPermissionDelegateHandler
    );
  const shouldDelegatePermission =
    permDelegateHandler.permissionDelegateFPEnabled;

  let secondOrigin = undefined;
  if (
    shouldDelegatePermission &&
    permDelegateHandler.maybeUnsafePermissionDelegate(requestTypes)
  ) {
    // We are going to prompt both first party and third party origin.
    // SecondOrigin should be third party
    secondOrigin = aContentWindow.document.nodePrincipal.origin;
  }

  let request = {
    callID: aCallID,
    windowID: aWindowID,
    secondOrigin,
    documentURI: aContentWindow.document.documentURI,
    secure: aSecure,
    isHandlingUserInput: aIsHandlingUserInput,
    shouldDelegatePermission,
    requestTypes,
    sharingScreen,
    sharingAudio,
    audioInputDevices,
    videoInputDevices,
    audioOutputDevices,
    hasInherentAudioConstraints,
    hasInherentVideoConstraints,
    audioOutputId: aAudioOutputOptions.deviceId,
  };

  let actor = getActorForWindow(aContentWindow);
  if (actor) {
    actor.sendAsyncMessage("webrtc:Request", request);
  }
}
// Notifies the platform that a getUserMedia request was denied (or lacked OS
// permission) and drops any matching pending request on the content window.
function denyGUMRequest(aData) {
  let subject;
  if (aData.noOSPermission) {
    subject = "getUserMedia:response:noOSPermission";
  } else {
    subject = "getUserMedia:response:deny";
  }
  Services.obs.notifyObservers(null, subject, aData.callID);

  if (!aData.windowID) {
    return;
  }
  let contentWindow = Services.wm.getOuterWindowWithId(aData.windowID);
  if (contentWindow.pendingGetUserMediaRequests) {
    forgetGUMRequest(contentWindow, aData.callID);
  }
}
// Removes a pending getUserMedia request and tears down the per-window
// pending lists if both are now empty.
function forgetGUMRequest(aContentWindow, aCallID) {
  aContentWindow.pendingGetUserMediaRequests.delete(aCallID);
  forgetPendingListsEventually(aContentWindow);
}
// Removes a pending PeerConnection request and tears down the per-window
// pending lists if both are now empty.
function forgetPCRequest(aContentWindow, aCallID) {
  aContentWindow.pendingPeerConnectionRequests.delete(aCallID);
  forgetPendingListsEventually(aContentWindow);
}
// Lazily creates the per-window pending-request collections and registers the
// unload listener that cancels outstanding prompts on frame reload.
function setupPendingListsInitially(aContentWindow) {
  if (aContentWindow.pendingGetUserMediaRequests) {
    return;
  }
  aContentWindow.pendingGetUserMediaRequests = new Map();
  aContentWindow.pendingPeerConnectionRequests = new Set();
  aContentWindow.addEventListener("unload", WebRTCChild.handleEvent);
}
// Drops the per-window pending-request collections and the unload listener
// once both collections are empty; no-op while either still has entries.
function forgetPendingListsEventually(aContentWindow) {
  if (
    aContentWindow.pendingGetUserMediaRequests.size ||
    aContentWindow.pendingPeerConnectionRequests.size
  ) {
    return;
  }
  aContentWindow.pendingGetUserMediaRequests = null;
  aContentWindow.pendingPeerConnectionRequests = null;
  aContentWindow.removeEventListener("unload", WebRTCChild.handleEvent);
}
// Handles "recording-device-events": recomputes the window's capture state
// and pushes it to the parent so sharing indicators stay current.
function updateIndicators(aSubject, aTopic, aData) {
  if (
    aSubject instanceof Ci.nsIPropertyBag &&
    aSubject.getProperty("requestURL") == kBrowserURL
  ) {
    // Ignore notifications caused by the browser UI showing previews.
    return;
  }

  let contentWindow = aSubject.getProperty("window");

  let actor = contentWindow ? getActorForWindow(contentWindow) : null;
  if (actor) {
    let tabState = getTabStateForContentWindow(contentWindow, false);
    tabState.windowId = getInnerWindowIDForWindow(contentWindow);

    // If we were silencing DOM notifications before, but we've updated
    // state such that we're no longer sharing one of our displays, then
    // reset the silencing state.
    if (actor.suppressNotifications) {
      if (!tabState.screen && !tabState.window && !tabState.browser) {
        actor.suppressNotifications = false;
      }
    }

    tabState.suppressNotifications = actor.suppressNotifications;

    actor.sendAsyncMessage("webrtc:UpdateIndicators", tabState);
  }
}
// Handles "recording-window-ended": tells the parent to clear the sharing
// indicator for the outer window identified by aData.
function removeBrowserSpecificIndicator(aSubject, aTopic, aData) {
  let contentWindow = Services.wm.getOuterWindowWithId(aData);
  if (contentWindow.document.documentURI == kBrowserURL) {
    // Ignore notifications caused by the browser UI showing previews.
    return;
  }

  let tabState = getTabStateForContentWindow(contentWindow, true);

  tabState.windowId = aData;

  let actor = getActorForWindow(contentWindow);
  if (actor) {
    actor.sendAsyncMessage("webrtc:UpdateIndicators", tabState);
  }
}
// Queries the media manager for the window's capture state and returns a
// serializable summary for the parent, or { remove: true } when nothing is
// being captured (or when the caller explicitly asked for removal).
function getTabStateForContentWindow(aContentWindow, aForRemove = false) {
  // Out-parameters filled in by mediaCaptureWindowState (XPCOM outparam style).
  let camera = {};
  let microphone = {};
  let screen = {};
  let window = {};
  let browser = {};
  let devices = {};
  lazy.MediaManagerService.mediaCaptureWindowState(
    aContentWindow,
    camera,
    microphone,
    screen,
    window,
    browser,
    devices
  );

  if (
    camera.value == lazy.MediaManagerService.STATE_NOCAPTURE &&
    microphone.value == lazy.MediaManagerService.STATE_NOCAPTURE &&
    screen.value == lazy.MediaManagerService.STATE_NOCAPTURE &&
    window.value == lazy.MediaManagerService.STATE_NOCAPTURE &&
    browser.value == lazy.MediaManagerService.STATE_NOCAPTURE
  ) {
    return { remove: true };
  }

  if (aForRemove) {
    return { remove: true };
  }

  // Reduce each nsIMediaDevice to the plain fields the parent UI needs.
  let serializedDevices = [];
  if (Array.isArray(devices.value)) {
    serializedDevices = devices.value.map(device => {
      return {
        type: device.type,
        mediaSource: device.mediaSource,
        rawId: device.rawId,
        scary: device.scary,
      };
    });
  }

  return {
    camera: camera.value,
    microphone: microphone.value,
    screen: screen.value,
    window: window.value,
    browser: browser.value,
    devices: serializedDevices,
  };
}
// Returns the inner-window id of the given content window.
function getInnerWindowIDForWindow(aContentWindow) {
  return aContentWindow.windowGlobalChild.innerWindowId;
}