1
0
mirror of https://github.com/matrix-org/matrix-js-sdk.git synced 2025-11-29 16:43:09 +03:00

Merge pull request #29 from matrix-org/screen-sharing

Basic screen-sharing support; adds a screen-sharing stream into the call and adds support for playing back an audio-only stream (i.e. the voice-over) alongside an AV stream such as the screenshare.
This commit is contained in:
Matthew Hodgson
2015-10-21 01:45:38 +01:00
2 changed files with 171 additions and 16 deletions

View File

@@ -2190,6 +2190,7 @@ function setupCallEventHandler(client) {
var content = event.getContent();
var call = content.call_id ? client.callList[content.call_id] : undefined;
var i;
//console.log("RECV %s content=%s", event.getType(), JSON.stringify(content));
if (event.getType() === "m.call.invite") {
if (event.getSender() === client.credentials.userId) {

View File

@@ -44,6 +44,8 @@ function MatrixCall(opts) {
// possible
this.candidateSendQueue = [];
this.candidateSendTries = 0;
this.screenSharingStream = null;
}
/** The length of time a call can be ringing for. */
MatrixCall.CALL_TIMEOUT_MS = 60000;
@@ -64,6 +66,7 @@ utils.inherits(MatrixCall, EventEmitter);
* @throws If you have not specified a listener for 'error' events.
*/
MatrixCall.prototype.placeVoiceCall = function() {
debuglog("placeVoiceCall");
checkForErrorListener(this);
_placeCallWithConstraints(this, _getUserMediaVideoContraints('voice'));
this.type = 'voice';
@@ -78,6 +81,7 @@ MatrixCall.prototype.placeVoiceCall = function() {
* @throws If you have not specified a listener for 'error' events.
*/
MatrixCall.prototype.placeVideoCall = function(remoteVideoElement, localVideoElement) {
debuglog("placeVideoCall");
checkForErrorListener(this);
this.localVideoElement = localVideoElement;
this.remoteVideoElement = remoteVideoElement;
@@ -86,6 +90,43 @@ MatrixCall.prototype.placeVideoCall = function(remoteVideoElement, localVideoEle
_tryPlayRemoteStream(this);
};
/**
 * Place a screen-sharing call to this room. A voice (audio) stream is
 * requested and sent alongside the screen capture stream.
 * @param {Element} remoteVideoElement a <code>&lt;video&gt;</code> DOM element
 * to render video to.
 * @param {Element} localVideoElement a <code>&lt;video&gt;</code> DOM element
 * to render the local camera preview.
 * @throws If you have not specified a listener for 'error' events.
 */
MatrixCall.prototype.placeScreenSharingCall =
    function(remoteVideoElement, localVideoElement)
{
    debuglog("placeScreenSharingCall");
    checkForErrorListener(this);
    var constraints = _getChromeScreenSharingConstraints(this);
    if (!constraints) {
        // an "error" event has already been emitted for us
        return;
    }
    this.localVideoElement = localVideoElement;
    this.remoteVideoElement = remoteVideoElement;
    var call = this;
    // once we have the screen stream, kick off the call proper with an
    // additional audio stream.
    var onScreenStream = function(stream) {
        call.screenSharingStream = stream;
        debuglog("Got screen stream, requesting audio stream...");
        _placeCallWithConstraints(call, _getUserMediaVideoContraints('voice'));
    };
    var onScreenStreamFailed = function(err) {
        call.emit("error",
            callError(
                MatrixCall.ERR_NO_USER_MEDIA,
                "Failed to get screen-sharing stream: " + err
            )
        );
    };
    this.webRtc.getUserMedia(constraints, onScreenStream, onScreenStreamFailed);
    this.type = 'video';
    _tryPlayRemoteStream(this);
};
/**
* Retrieve the local <code>&lt;video&gt;</code> DOM element.
* @return {Element} The dom element
@@ -95,13 +136,23 @@ MatrixCall.prototype.getLocalVideoElement = function() {
};
/**
 * Retrieve the remote <video> DOM element
 * used for playing back video capable streams.
 * @return {Element} The dom element
 */
MatrixCall.prototype.getRemoteVideoElement = function() {
    return this.remoteVideoElement;
};
/**
 * Get the <audio> DOM element on which audio-only remote streams
 * are played back.
 * @return {Element} The dom element
 */
MatrixCall.prototype.getRemoteAudioElement = function getRemoteAudioElement() {
    return this.remoteAudioElement;
};
/**
* Set the local <code>&lt;video&gt;</code> DOM element. If this call is active,
* video will be rendered to it immediately.
@@ -126,7 +177,7 @@ MatrixCall.prototype.setLocalVideoElement = function(element) {
/**
* Set the remote <code>&lt;video&gt;</code> DOM element. If this call is active,
* video will be rendered to it immediately.
* the first received video-capable stream will be rendered to it immediately.
* @param {Element} element The <code>&lt;video&gt;</code> DOM element.
*/
MatrixCall.prototype.setRemoteVideoElement = function(element) {
@@ -134,6 +185,16 @@ MatrixCall.prototype.setRemoteVideoElement = function(element) {
_tryPlayRemoteStream(this);
};
/**
 * Set the remote <audio> DOM element. If this call is active,
 * the first received audio-only stream will be rendered to it immediately.
 * @param {Element} element The <audio> DOM element.
 */
MatrixCall.prototype.setRemoteAudioElement = function(element) {
    this.remoteAudioElement = element;
    _tryPlayRemoteAudioStream(this);
};
/**
* Configure this call from an invite event. Used by MatrixClient.
* @protected
@@ -238,6 +299,7 @@ MatrixCall.prototype._replacedBy = function(newCall) {
}
newCall.localVideoElement = this.localVideoElement;
newCall.remoteVideoElement = this.remoteVideoElement;
newCall.remoteAudioElement = this.remoteAudioElement;
this.successor = newCall;
this.emit("replaced", newCall);
this.hangup(true);
@@ -326,12 +388,19 @@ MatrixCall.prototype._gotUserMediaForInvite = function(stream) {
if (this.state == 'ended') {
return;
}
debuglog("_gotUserMediaForInvite -> " + this.type);
var self = this;
var videoEl = this.getLocalVideoElement();
if (videoEl && this.type == 'video') {
videoEl.autoplay = true;
videoEl.src = this.URL.createObjectURL(stream);
if (this.screenSharingStream) {
debuglog("Setting screen sharing stream to the local video element");
videoEl.src = this.URL.createObjectURL(this.screenSharingStream);
}
else {
videoEl.src = this.URL.createObjectURL(stream);
}
videoEl.muted = true;
setTimeout(function() {
var vel = self.getLocalVideoElement();
@@ -342,9 +411,16 @@ MatrixCall.prototype._gotUserMediaForInvite = function(stream) {
}
this.localAVStream = stream;
// why do we enable audio (and only audio) tracks here? -- matthew
setTracksEnabled(stream.getAudioTracks(), true);
this.peerConn = _createPeerConnection(this);
this.peerConn.addStream(stream);
if (this.screenSharingStream) {
console.log("Adding screen-sharing stream to peer connection");
this.peerConn.addStream(this.screenSharingStream);
// let's use this for the local preview...
this.localAVStream = this.screenSharingStream;
}
this.peerConn.createOffer(
hookCallback(self, self._gotLocalOffer),
hookCallback(self, self._getLocalOfferFailed)
@@ -529,8 +605,9 @@ MatrixCall.prototype._getLocalOfferFailed = function(error) {
/**
* Internal
* @private
* @param {Object} error
*/
MatrixCall.prototype._getUserMediaFailed = function() {
MatrixCall.prototype._getUserMediaFailed = function(error) {
this.emit(
"error",
callError(
@@ -598,22 +675,21 @@ MatrixCall.prototype._onSetRemoteDescriptionError = function(e) {
* @param {Object} event
*/
MatrixCall.prototype._onAddStream = function(event) {
debuglog("Stream added" + event);
debuglog("Stream id " + event.stream.id + " added");
var s = event.stream;
this.remoteAVStream = s;
if (this.direction == 'inbound') {
if (s.getVideoTracks().length > 0) {
this.type = 'video';
} else {
this.type = 'voice';
}
if (s.getVideoTracks().length > 0) {
this.type = 'video';
this.remoteAVStream = s;
} else {
this.type = 'voice';
this.remoteAStream = s;
}
var self = this;
forAllTracksOnStream(s, function(t) {
debuglog("Track id " + t.id + " added");
// not currently implemented in chrome
t.onstarted = hookCallback(self, self._onRemoteStreamTrackStarted);
});
@@ -622,7 +698,12 @@ MatrixCall.prototype._onAddStream = function(event) {
// not currently implemented in chrome
event.stream.onstarted = hookCallback(self, self._onRemoteStreamStarted);
_tryPlayRemoteStream(this);
if (this.type === 'video') {
_tryPlayRemoteStream(this);
}
else {
_tryPlayRemoteAudioStream(this);
}
};
/**
@@ -729,6 +810,12 @@ var terminate = function(self, hangupParty, hangupReason, shouldEmit) {
}
self.getRemoteVideoElement().src = "";
}
if (self.getRemoteAudioElement()) {
if (self.getRemoteAudioElement().pause) {
self.getRemoteAudioElement().pause();
}
self.getRemoteAudioElement().src = "";
}
if (self.getLocalVideoElement()) {
if (self.getLocalVideoElement().pause) {
self.getLocalVideoElement().pause();
@@ -760,6 +847,16 @@ var stopAllMedia = function(self) {
self.localAVStream.stop();
}
}
if (self.screenSharingStream) {
forAllTracksOnStream(self.screenSharingStream, function(t) {
if (t.stop) {
t.stop();
}
});
if (self.screenSharingStream.stop) {
self.screenSharingStream.stop();
}
}
if (self.remoteAVStream) {
forAllTracksOnStream(self.remoteAVStream, function(t) {
if (t.stop) {
@@ -767,6 +864,13 @@ var stopAllMedia = function(self) {
}
});
}
if (self.remoteAStream) {
forAllTracksOnStream(self.remoteAStream, function(t) {
if (t.stop) {
t.stop();
}
});
}
};
var _tryPlayRemoteStream = function(self) {
@@ -787,6 +891,24 @@ var _tryPlayRemoteStream = function(self) {
}
};
// Start playback of the remote audio-only stream on the call's <audio>
// element, if both are present; otherwise do nothing.
var _tryPlayRemoteAudioStream = function(self) {
    var audioEl = self.getRemoteAudioElement();
    if (!audioEl || !self.remoteAStream) {
        return;
    }
    audioEl.autoplay = true;
    audioEl.src = self.URL.createObjectURL(self.remoteAStream);
    setTimeout(function() {
        var el = self.getRemoteAudioElement();
        if (el.play) {
            el.play();
        }
        // OpenWebRTC does not support oniceconnectionstatechange yet
        if (self.webRtc.isOpenWebRTC()) {
            setState(self, 'connected');
        }
    }, 0);
};
var checkForErrorListener = function(self) {
if (self.listeners("error").length === 0) {
throw new Error(
@@ -885,6 +1007,38 @@ var _createPeerConnection = function(self) {
return pc;
};
/**
 * Build Chrome-specific getUserMedia constraints for screen capture.
 * Emits an "error" event on the call and returns undefined when screen
 * sharing isn't possible: either the global screen object is missing, or
 * the page is not served over HTTPS (Chrome refuses screen capture on
 * insecure origins).
 * @param {MatrixCall} call The call to emit errors on.
 * @return {Object|undefined} Constraints for getUserMedia, or undefined
 * on failure.
 */
var _getChromeScreenSharingConstraints = function(call) {
    var screen = global.screen;
    if (!screen) {
        call.emit("error", callError(
            MatrixCall.ERR_NO_USER_MEDIA,
            // fixed typo: was "constaints"
            "Couldn't determine screen sharing constraints."
        ));
        return;
    }
    // it won't work at all if you're not on HTTPS so whine whine whine
    if (!global.window || global.window.location.protocol !== "https:") {
        call.emit("error", callError(
            MatrixCall.ERR_NO_USER_MEDIA,
            "You need to be using HTTPS to place a screen-sharing call."
        ));
        return;
    }
    return {
        video: {
            mandatory: {
                chromeMediaSource: "screen",
                // unique-ish id for this capture request
                chromeMediaSourceId: "" + Date.now(),
                maxWidth: screen.width,
                maxHeight: screen.height,
                minFrameRate: 1,
                maxFrameRate: 10
            }
        }
    };
};
var _getUserMediaVideoContraints = function(callType) {
switch (callType) {
case 'voice':
@@ -893,9 +1047,9 @@ var _getUserMediaVideoContraints = function(callType) {
return ({audio: true, video: {
mandatory: {
minWidth: 640,
maxWidth: 640,
maxWidth: 1024,
minHeight: 360,
maxHeight: 360
maxHeight: 576
}
}});
}