1
0
mirror of https://github.com/matrix-org/matrix-js-sdk.git synced 2025-08-06 12:02:40 +03:00

Fix tests

A bit of a re-organisation so that a PeerConnection already exists by the
time the tests go to mock things out. The placeCall methods now return
promises to make this possible.
This commit is contained in:
David Baker
2021-03-09 14:09:55 +00:00
parent 51e817a3a2
commit 8375638d76
2 changed files with 16 additions and 12 deletions

View File

@@ -79,6 +79,7 @@ class MockRTCPeerConnection {
return Promise.resolve(); return Promise.resolve();
} }
close() {} close() {}
getStats() { return []; }
} }
describe('Call', function() { describe('Call', function() {
@@ -122,6 +123,7 @@ describe('Call', function() {
// We just stub out sendEvent: we're not interested in testing the client's // We just stub out sendEvent: we're not interested in testing the client's
// event sending code here // event sending code here
client.client.sendEvent = () => {}; client.client.sendEvent = () => {};
client.httpBackend.when("GET", "/voip/turnServer").respond(200, {});
call = new MatrixCall({ call = new MatrixCall({
client: client.client, client: client.client,
roomId: '!foo:bar', roomId: '!foo:bar',
@@ -138,7 +140,9 @@ describe('Call', function() {
}); });
it('should ignore candidate events from non-matching party ID', async function() { it('should ignore candidate events from non-matching party ID', async function() {
await call.placeVoiceCall(); const callPromise = call.placeVoiceCall();
await client.httpBackend.flush();
await callPromise;
await call.onAnswerReceived({ await call.onAnswerReceived({
getContent: () => { getContent: () => {
return { return {
@@ -192,7 +196,9 @@ describe('Call', function() {
}); });
it('should add candidates received before answer if party ID is correct', async function() { it('should add candidates received before answer if party ID is correct', async function() {
await call.placeVoiceCall(); const callPromise = call.placeVoiceCall();
await client.httpBackend.flush();
await callPromise;
call.peerConn.addIceCandidate = jest.fn(); call.peerConn.addIceCandidate = jest.fn();
call.onRemoteIceCandidatesReceived({ call.onRemoteIceCandidatesReceived({

View File

@@ -333,11 +333,11 @@ export class MatrixCall extends EventEmitter {
* Place a voice call to this room. * Place a voice call to this room.
* @throws If you have not specified a listener for 'error' events. * @throws If you have not specified a listener for 'error' events.
*/ */
placeVoiceCall() { async placeVoiceCall() {
logger.debug("placeVoiceCall"); logger.debug("placeVoiceCall");
this.checkForErrorListener(); this.checkForErrorListener();
const constraints = getUserMediaContraints(ConstraintsType.Audio); const constraints = getUserMediaContraints(ConstraintsType.Audio);
this.placeCallWithConstraints(constraints); await this.placeCallWithConstraints(constraints);
this.type = CallType.Voice; this.type = CallType.Voice;
} }
@@ -349,13 +349,13 @@ export class MatrixCall extends EventEmitter {
* to render the local camera preview. * to render the local camera preview.
* @throws If you have not specified a listener for 'error' events. * @throws If you have not specified a listener for 'error' events.
*/ */
placeVideoCall(remoteVideoElement: HTMLVideoElement, localVideoElement: HTMLVideoElement) { async placeVideoCall(remoteVideoElement: HTMLVideoElement, localVideoElement: HTMLVideoElement) {
logger.debug("placeVideoCall"); logger.debug("placeVideoCall");
this.checkForErrorListener(); this.checkForErrorListener();
this.localVideoElement = localVideoElement; this.localVideoElement = localVideoElement;
this.remoteVideoElement = remoteVideoElement; this.remoteVideoElement = remoteVideoElement;
const constraints = getUserMediaContraints(ConstraintsType.Video); const constraints = getUserMediaContraints(ConstraintsType.Video);
this.placeCallWithConstraints(constraints); await this.placeCallWithConstraints(constraints);
this.type = CallType.Video; this.type = CallType.Video;
} }
@@ -864,7 +864,6 @@ export class MatrixCall extends EventEmitter {
// why do we enable audio (and only audio) tracks here? -- matthew // why do we enable audio (and only audio) tracks here? -- matthew
setTracksEnabled(stream.getAudioTracks(), true); setTracksEnabled(stream.getAudioTracks(), true);
this.peerConn = this.createPeerConnection();
for (const audioTrack of stream.getAudioTracks()) { for (const audioTrack of stream.getAudioTracks()) {
logger.info("Adding audio track with id " + audioTrack.id); logger.info("Adding audio track with id " + audioTrack.id);
@@ -1677,11 +1676,10 @@ export class MatrixCall extends EventEmitter {
logger.warn("Failed to get TURN credentials! Proceeding with call anyway..."); logger.warn("Failed to get TURN credentials! Proceeding with call anyway...");
} }
// It would be really nice if we could start gathering candidates at this point // create the peer connection now so it can be gathering candidates while we get user
// so the ICE agent could be gathering while we open our media devices: we already // media (assuming a candidate pool size is configured)
// know the type of the call and therefore what tracks we want to send. this.peerConn = this.createPeerConnection();
// Perhaps we could do this by making fake tracks now and then using replaceTrack()
// once we have the actual tracks? (Can we make fake tracks?)
try { try {
const mediaStream = await navigator.mediaDevices.getUserMedia(constraints); const mediaStream = await navigator.mediaDevices.getUserMedia(constraints);
this.gotUserMediaForInvite(mediaStream); this.gotUserMediaForInvite(mediaStream);