1
0
mirror of https://github.com/matrix-org/matrix-js-sdk.git synced 2025-11-26 17:03:12 +03:00

Merge branch 'develop' into gsouquet/cache-decrypt

This commit is contained in:
Germain Souquet
2021-05-10 17:35:05 +01:00
9 changed files with 231 additions and 261 deletions

View File

@@ -1,3 +1,26 @@
Changes in [10.1.0](https://github.com/matrix-org/matrix-js-sdk/releases/tag/v10.1.0) (2021-05-10)
==================================================================================================
[Full Changelog](https://github.com/matrix-org/matrix-js-sdk/compare/v10.1.0-rc.1...v10.1.0)
* No changes since rc.1
Changes in [10.1.0-rc.1](https://github.com/matrix-org/matrix-js-sdk/releases/tag/v10.1.0-rc.1) (2021-05-04)
============================================================================================================
[Full Changelog](https://github.com/matrix-org/matrix-js-sdk/compare/v10.0.0...v10.1.0-rc.1)
* Revert "Raise logging dramatically to chase pending event errors"
[\#1681](https://github.com/matrix-org/matrix-js-sdk/pull/1681)
* Add test coverage collection script
[\#1677](https://github.com/matrix-org/matrix-js-sdk/pull/1677)
* Raise logging dramatically to chase pending event errors
[\#1678](https://github.com/matrix-org/matrix-js-sdk/pull/1678)
* Support MSC3086 asserted identity
[\#1674](https://github.com/matrix-org/matrix-js-sdk/pull/1674)
* Make `/search` with no results field work again
[\#1670](https://github.com/matrix-org/matrix-js-sdk/pull/1670)
* Add room.getMembers method
[\#1672](https://github.com/matrix-org/matrix-js-sdk/pull/1672)
Changes in [10.0.0](https://github.com/matrix-org/matrix-js-sdk/releases/tag/v10.0.0) (2021-04-26)
==================================================================================================
[Full Changelog](https://github.com/matrix-org/matrix-js-sdk/compare/v10.0.0-rc.1...v10.0.0)

View File

@@ -31,6 +31,23 @@ function addListeners(call) {
call.hangup();
disableButtons(false, true, true);
});
call.on("feeds_changed", function(feeds) {
const localFeed = feeds.find((feed) => feed.isLocal());
const remoteFeed = feeds.find((feed) => !feed.isLocal());
const remoteElement = document.getElementById("remote");
const localElement = document.getElementById("local");
if (remoteFeed) {
remoteElement.srcObject = remoteFeed.stream;
remoteElement.play();
}
if (localFeed) {
localElement.muted = true;
localElement.srcObject = localFeed.stream;
localElement.play();
}
});
}
window.onload = function() {
@@ -62,10 +79,7 @@ function syncComplete() {
);
console.log("Call => %s", call);
addListeners(call);
call.placeVideoCall(
document.getElementById("remote"),
document.getElementById("local")
);
call.placeVideoCall();
document.getElementById("result").innerHTML = "<p>Placed call.</p>";
disableButtons(true, true, false);
};

View File

@@ -1,26 +1,34 @@
<html>
<head>
<title>VoIP Test</title>
<script src="lib/matrix.js"></script>
<script src="browserTest.js"></script>
<title>VoIP Test</title>
<script src="lib/matrix.js"></script>
<script src="browserTest.js"></script>
</head>
<body>
You can place and receive calls with this example. Make sure to edit the
You can place and receive calls with this example. Make sure to edit the
constants in <code>browserTest.js</code> first.
<div id="config"></div>
<div id="result"></div>
<button id="call">Place Call</button>
<button id="answer">Answer Call</button>
<button id="hangup">Hangup Call</button>
<div id="videoBackground">
<div id="videoContainer">
<video id="remote"></video>
</div>
</div>
<div id="videoBackground">
<div id="videoContainer">
<video id="local"></video>
</div>
<div id="videoBackground" class="video-background">
<video class="video-element" id="local"></video>
<video class="video-element" id="remote"></video>
</div>
</body>
</html>
<style>
.video-background {
height: 500px;
margin: 10px;
}
.video-element {
height: 100%;
}
</style>

View File

@@ -1,6 +1,6 @@
{
"name": "matrix-js-sdk",
"version": "10.0.0",
"version": "10.1.0",
"description": "Matrix Client-Server SDK for Javascript",
"scripts": {
"prepublishOnly": "yarn build",

View File

@@ -55,7 +55,6 @@ export * from "./content-repo";
export * as ContentHelpers from "./content-helpers";
export {
createNewMatrixCall,
setAudioOutput as setMatrixCallAudioOutput,
setAudioInput as setMatrixCallAudioInput,
setVideoInput as setMatrixCallVideoInput,
} from "./webrtc/call";

View File

@@ -28,7 +28,15 @@ import MatrixEvent from '../models/event';
import {EventType} from '../@types/event';
import { RoomMember } from '../models/room-member';
import { randomString } from '../randomstring';
import { MCallReplacesEvent, MCallAnswer, MCallOfferNegotiate, CallCapabilities } from './callEventTypes';
import {
MCallReplacesEvent,
MCallAnswer,
MCallOfferNegotiate,
CallCapabilities,
SDPStreamMetadataPurpose,
} from './callEventTypes';
import { CallFeed } from './callFeed';
// events: hangup, error(err), replaced(call), state(state, oldState)
@@ -106,6 +114,8 @@ export enum CallEvent {
RemoteHoldUnhold = 'remote_hold_unhold',
// backwards compat alias for LocalHoldUnhold: remove in a major version bump
HoldUnhold = 'hold_unhold',
// Feeds have changed
FeedsChanged = 'feeds_changed',
AssertedIdentityChanged = 'asserted_identity_changed',
}
@@ -255,11 +265,8 @@ export class MatrixCall extends EventEmitter {
private candidateSendTries: number;
private sentEndOfCandidates: boolean;
private peerConn: RTCPeerConnection;
private localVideoElement: HTMLVideoElement;
private remoteVideoElement: HTMLVideoElement;
private remoteAudioElement: HTMLAudioElement;
private feeds: Array<CallFeed>;
private screenSharingStream: MediaStream;
private remoteStream: MediaStream;
private localAVStream: MediaStream;
private inviteOrAnswerSent: boolean;
private waitForLocalAVStream: boolean;
@@ -336,6 +343,8 @@ export class MatrixCall extends EventEmitter {
this.unholdingRemote = false;
this.micMuted = false;
this.vidMuted = false;
this.feeds = [];
}
/**
@@ -352,17 +361,11 @@ export class MatrixCall extends EventEmitter {
/**
* Place a video call to this room.
* @param {Element} remoteVideoElement a <code>&lt;video&gt;</code> DOM element
* to render video to.
* @param {Element} localVideoElement a <code>&lt;video&gt;</code> DOM element
* to render the local camera preview.
* @throws If you have not specified a listener for 'error' events.
*/
async placeVideoCall(remoteVideoElement: HTMLVideoElement, localVideoElement: HTMLVideoElement) {
async placeVideoCall() {
logger.debug("placeVideoCall");
this.checkForErrorListener();
this.localVideoElement = localVideoElement;
this.remoteVideoElement = remoteVideoElement;
const constraints = getUserMediaContraints(ConstraintsType.Video);
this.type = CallType.Video;
await this.placeCallWithConstraints(constraints);
@@ -372,22 +375,11 @@ export class MatrixCall extends EventEmitter {
* Place a screen-sharing call to this room. This includes audio.
* <b>This method is EXPERIMENTAL and subject to change without warning. It
* only works in Google Chrome and Firefox >= 44.</b>
* @param {Element} remoteVideoElement a <code>&lt;video&gt;</code> DOM element
* to render video to.
* @param {Element} localVideoElement a <code>&lt;video&gt;</code> DOM element
* to render the local camera preview.
* @throws If you have not specified a listener for 'error' events.
*/
async placeScreenSharingCall(
remoteVideoElement: HTMLVideoElement,
localVideoElement: HTMLVideoElement,
selectDesktopCapturerSource?: () => Promise<DesktopCapturerSource>,
) {
async placeScreenSharingCall(selectDesktopCapturerSource?: () => Promise<DesktopCapturerSource>) {
logger.debug("placeScreenSharingCall");
this.checkForErrorListener();
this.localVideoElement = localVideoElement;
this.remoteVideoElement = remoteVideoElement;
try {
const screenshareConstraints = await getScreenshareContraints(selectDesktopCapturerSource);
if (!screenshareConstraints) {
@@ -433,85 +425,53 @@ export class MatrixCall extends EventEmitter {
}
/**
* Retrieve the local <code>&lt;video&gt;</code> DOM element.
* @return {Element} The dom element
* Returns an array of all CallFeeds
* @returns {Array<CallFeed>} CallFeeds
*/
public getLocalVideoElement(): HTMLVideoElement {
return this.localVideoElement;
public getFeeds(): Array<CallFeed> {
return this.feeds;
}
/**
* Retrieve the remote <code>&lt;video&gt;</code> DOM element
* used for playing back video capable streams.
* @return {Element} The dom element
* Returns an array of all local CallFeeds
* @returns {Array<CallFeed>} local CallFeeds
*/
public getRemoteVideoElement(): HTMLVideoElement {
return this.remoteVideoElement;
public getLocalFeeds(): Array<CallFeed> {
return this.feeds.filter((feed) => {return feed.isLocal()});
}
/**
* Retrieve the remote <code>&lt;audio&gt;</code> DOM element
* used for playing back audio only streams.
* @return {Element} The dom element
* Returns an array of all remote CallFeeds
* @returns {Array<CallFeed>} remote CallFeeds
*/
public getRemoteAudioElement(): HTMLAudioElement {
return this.remoteAudioElement;
public getRemoteFeeds(): Array<CallFeed> {
return this.feeds.filter((feed) => {return !feed.isLocal()});
}
/**
* Set the local <code>&lt;video&gt;</code> DOM element. If this call is active,
* video will be rendered to it immediately.
* @param {Element} element The <code>&lt;video&gt;</code> DOM element.
* Returns true if there are no incoming feeds,
* otherwise returns false
* @returns {boolean} no incoming feeds
*/
public async setLocalVideoElement(element: HTMLVideoElement) {
this.localVideoElement = element;
public noIncomingFeeds(): boolean {
return !this.feeds.some((feed) => !feed.isLocal());
}
if (element && this.localAVStream && this.type === CallType.Video) {
element.autoplay = true;
element.srcObject = this.localAVStream;
element.muted = true;
try {
await element.play();
} catch (e) {
logger.info("Failed to play local video element", e);
}
private pushNewFeed(stream: MediaStream, userId: string, purpose: SDPStreamMetadataPurpose) {
// Try to find a feed with the same stream id as the new stream,
// if we find it replace the old stream with the new one
const feed = this.feeds.find((feed) => feed.stream.id === stream.id);
if (feed) {
feed.setNewStream(stream);
} else {
this.feeds.push(new CallFeed(stream, userId, purpose, this.client, this.roomId));
this.emit(CallEvent.FeedsChanged, this.feeds);
}
}
/**
* Set the remote <code>&lt;video&gt;</code> DOM element. If this call is active,
* the first received video-capable stream will be rendered to it immediately.
* @param {Element} element The <code>&lt;video&gt;</code> DOM element.
*/
public setRemoteVideoElement(element: HTMLVideoElement) {
if (element === this.remoteVideoElement) return;
element.autoplay = true;
// if we already have an audio element set, use that instead and mute the audio
// on this video element.
if (this.remoteAudioElement) element.muted = true;
this.remoteVideoElement = element;
if (this.remoteStream) {
this.playRemoteVideo();
}
}
/**
* Set the remote <code>&lt;audio&gt;</code> DOM element. If this call is active,
* the first received audio-only stream will be rendered to it immediately.
* The audio will *not* be rendered from the remoteVideoElement.
* @param {Element} element The <code>&lt;video&gt;</code> DOM element.
*/
public async setRemoteAudioElement(element: HTMLAudioElement) {
if (element === this.remoteAudioElement) return;
this.remoteAudioElement = element;
if (this.remoteStream) this.playRemoteAudio();
private deleteAllFeeds() {
this.feeds = [];
this.emit(CallEvent.FeedsChanged, this.feeds);
}
// The typescript definitions have this type as 'any' :(
@@ -566,16 +526,18 @@ export class MatrixCall extends EventEmitter {
return;
}
const remoteStream = this.feeds.find((feed) => {return !feed.isLocal()})?.stream;
// According to previous comments in this file, firefox at some point did not
// add streams until media started arriving on them. Testing latest firefox
// (81 at time of writing), this is no longer a problem, so let's do it the correct way.
if (!this.remoteStream || this.remoteStream.getTracks().length === 0) {
if (!remoteStream || remoteStream.getTracks().length === 0) {
logger.error("No remote stream or no tracks after setting remote description!");
this.terminate(CallParty.Local, CallErrorCode.SetRemoteDescription, false);
return;
}
this.type = this.remoteStream.getTracks().some(t => t.kind === 'video') ? CallType.Video : CallType.Voice;
this.type = remoteStream.getTracks().some(t => t.kind === 'video') ? CallType.Video : CallType.Voice;
this.setState(CallState.Ringing);
@@ -660,9 +622,6 @@ export class MatrixCall extends EventEmitter {
newCall.gotUserMediaForAnswer(this.localAVStream);
delete(this.localAVStream);
}
newCall.localVideoElement = this.localVideoElement;
newCall.remoteVideoElement = this.remoteVideoElement;
newCall.remoteAudioElement = this.remoteAudioElement;
this.successor = newCall;
this.emit(CallEvent.Replaced, newCall);
this.hangup(CallErrorCode.Replaced, true);
@@ -774,10 +733,6 @@ export class MatrixCall extends EventEmitter {
}
this.updateMuteStatus();
if (!onHold) {
this.playRemoteAudio();
}
this.emit(CallEvent.RemoteHoldUnhold, this.remoteOnHold);
}
@@ -831,16 +786,6 @@ export class MatrixCall extends EventEmitter {
const vidShouldBeMuted = this.vidMuted || this.remoteOnHold;
setTracksEnabled(this.localAVStream.getVideoTracks(), !vidShouldBeMuted);
if (this.remoteOnHold) {
if (this.remoteAudioElement && this.remoteAudioElement.srcObject === this.remoteStream) {
this.remoteAudioElement.muted = true;
} else if (this.remoteVideoElement && this.remoteVideoElement.srcObject === this.remoteStream) {
this.remoteVideoElement.muted = true;
}
} else {
this.playRemoteAudio();
}
}
/**
@@ -863,24 +808,13 @@ export class MatrixCall extends EventEmitter {
logger.debug("gotUserMediaForInvite -> " + this.type);
const videoEl = this.getLocalVideoElement();
if (videoEl && this.type === CallType.Video) {
videoEl.autoplay = true;
if (this.screenSharingStream) {
logger.debug(
"Setting screen sharing stream to the local video element",
);
videoEl.srcObject = this.screenSharingStream;
} else {
videoEl.srcObject = stream;
}
videoEl.muted = true;
try {
await videoEl.play();
} catch (e) {
logger.info("Failed to play local video element", e);
}
if (this.screenSharingStream) {
logger.debug(
"Setting screen sharing stream to the local video element",
);
this.pushNewFeed(this.screenSharingStream, this.client.getUserId(), SDPStreamMetadataPurpose.Screenshare);
} else {
this.pushNewFeed(stream, this.client.getUserId(), SDPStreamMetadataPurpose.Usermedia);
}
// why do we enable audio (and only audio) tracks here? -- matthew
@@ -950,19 +884,7 @@ export class MatrixCall extends EventEmitter {
return;
}
const localVidEl = this.getLocalVideoElement();
if (localVidEl && this.type === CallType.Video) {
localVidEl.autoplay = true;
localVidEl.srcObject = stream;
localVidEl.muted = true;
try {
await localVidEl.play();
} catch (e) {
logger.info("Failed to play local video element", e);
}
}
this.pushNewFeed(stream, this.client.getUserId(), SDPStreamMetadataPurpose.Usermedia);
this.localAVStream = stream;
logger.info("Got local AV stream with id " + this.localAVStream.id);
@@ -1377,32 +1299,31 @@ export class MatrixCall extends EventEmitter {
logger.warn(`Streamless ${ev.track.kind} found: ignoring.`);
return;
}
const oldRemoteStream = this.feeds.find((feed) => {return !feed.isLocal()})?.stream;
// If we already have a stream, check this track is from the same one
if (this.remoteStream && ev.streams[0].id !== this.remoteStream.id) {
// Note that we check by ID and always set the remote stream: Chrome appears
// to make new stream objects when transceiver directionality is changed and the 'active'
// status of streams change - Dave
if (oldRemoteStream && ev.streams[0].id !== oldRemoteStream.id) {
logger.warn(
`Ignoring new stream ID ${ev.streams[0].id}: we already have stream ID ${this.remoteStream.id}`,
`Ignoring new stream ID ${ev.streams[0].id}: we already have stream ID ${oldRemoteStream.id}`,
);
return;
}
if (!this.remoteStream) {
if (!oldRemoteStream) {
logger.info("Got remote stream with id " + ev.streams[0].id);
}
// Note that we check by ID above and always set the remote stream: Chrome appears
// to make new stream objects when transceiver directionality is changed and the 'active'
// status of streams change
this.remoteStream = ev.streams[0];
const newRemoteStream = ev.streams[0];
logger.debug(`Track id ${ev.track.id} of kind ${ev.track.kind} added`);
if (ev.track.kind === 'video') {
if (this.remoteVideoElement) {
this.playRemoteVideo();
}
} else {
if (this.remoteAudioElement) this.playRemoteAudio();
}
this.pushNewFeed(newRemoteStream, this.getOpponentMember().userId, SDPStreamMetadataPurpose.Usermedia)
logger.info("playing remote. stream active? " + newRemoteStream.active);
};
onNegotiationNeeded = async () => {
@@ -1425,52 +1346,6 @@ export class MatrixCall extends EventEmitter {
}
};
async playRemoteAudio() {
if (this.remoteVideoElement) this.remoteVideoElement.muted = true;
this.remoteAudioElement.muted = false;
this.remoteAudioElement.srcObject = this.remoteStream;
// if audioOutput is non-default:
try {
if (audioOutput) {
// This seems quite unreliable in Chrome, although I haven't yet managed to make a jsfiddle where
// it fails.
// It seems reliable if you set the sink ID after setting the srcObject and then set the sink ID
// back to the default after the call is over
logger.info("Setting audio sink to " + audioOutput + ", was " + this.remoteAudioElement.sinkId);
await this.remoteAudioElement.setSinkId(audioOutput);
}
} catch (e) {
logger.warn("Couldn't set requested audio output device: using default", e);
}
try {
await this.remoteAudioElement.play();
} catch (e) {
logger.error("Failed to play remote audio element", e);
}
}
private async playRemoteVideo() {
// A note on calling methods on media elements:
// We used to have queues per media element to serialise all calls on those elements.
// The reason given for this was that load() and play() were racing. However, we now
// never call load() explicitly so this seems unnecessary. However, serialising every
// operation was causing bugs where video would not resume because some play command
// had got stuck and all media operations were queued up behind it. If necessary, we
// should serialise the ones that need to be serialised but then be able to interrupt
// them with another load() which will cancel the pending one, but since we don't call
// load() explicitly, it shouldn't be a problem.
this.remoteVideoElement.srcObject = this.remoteStream;
logger.info("playing remote video. stream active? " + this.remoteStream.active);
try {
await this.remoteVideoElement.play();
} catch (e) {
logger.info("Failed to play remote video element", e);
}
}
onHangupReceived = (msg) => {
logger.debug("Hangup received for call ID " + this.callId);
@@ -1630,33 +1505,13 @@ export class MatrixCall extends EventEmitter {
this.inviteTimeout = null;
}
const remoteVid = this.getRemoteVideoElement();
const remoteAud = this.getRemoteAudioElement();
const localVid = this.getLocalVideoElement();
// Order is important here: first we stopAllMedia() and only then we can deleteAllFeeds()
this.stopAllMedia();
this.deleteAllFeeds();
if (remoteVid) {
remoteVid.pause();
remoteVid.srcObject = null;
}
if (remoteAud) {
remoteAud.pause();
remoteAud.srcObject = null;
try {
// As per comment in playRemoteAudio, setting the sink ID back to the default
// once the call is over makes setSinkId work reliably.
await this.remoteAudioElement.setSinkId('')
} catch (e) {
logger.warn("Failed to set sink ID back to default");
}
}
if (localVid) {
localVid.pause();
localVid.srcObject = null;
}
this.hangupParty = hangupParty;
this.hangupReason = hangupReason;
this.setState(CallState.Ended);
this.stopAllMedia();
if (this.peerConn && this.peerConn.signalingState !== 'closed') {
this.peerConn.close();
}
@@ -1667,19 +1522,9 @@ export class MatrixCall extends EventEmitter {
private stopAllMedia() {
logger.debug(`stopAllMedia (stream=${this.localAVStream})`);
if (this.localAVStream) {
for (const track of this.localAVStream.getTracks()) {
track.stop();
}
}
if (this.screenSharingStream) {
for (const track of this.screenSharingStream.getTracks()) {
track.stop();
}
}
if (this.remoteStream) {
for (const track of this.remoteStream.getTracks()) {
for (const feed of this.feeds) {
for (const track of feed.stream.getTracks()) {
track.stop();
}
}
@@ -1910,16 +1755,8 @@ async function getScreenshareContraints(selectDesktopCapturerSource?: () => Prom
}
}
let audioOutput: string;
let audioInput: string;
let videoInput: string;
/**
* Set an audio output device to use for MatrixCalls
* @function
* @param {string=} deviceId the identifier for the device
* undefined treated as unset
*/
export function setAudioOutput(deviceId: string) { audioOutput = deviceId; }
/**
* Set an audio input device to use for MatrixCalls
* @function

View File

@@ -1,6 +1,11 @@
// allow camelcase as these are events type that go onto the wire
/* eslint-disable camelcase */
// What a call's media stream carries, as advertised in the SDP stream
// metadata: ordinary user media (camera/microphone) or a screen-share.
export enum SDPStreamMetadataPurpose {
    Usermedia = "m.usermedia",
    Screenshare = "m.screenshare",
}
interface CallOfferAnswer {
type: string;
sdp: string;

84
src/webrtc/callFeed.ts Normal file
View File

@@ -0,0 +1,84 @@
/*
Copyright 2021 Šimon Brandner <simon.bra.ag@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import EventEmitter from "events";
import {SDPStreamMetadataPurpose} from "./callEventTypes";
import MatrixClient from "../client"
import {RoomMember} from "../models/room-member";
// Events emitted by CallFeed.
export enum CallFeedEvent {
    // Fired with the replacement MediaStream when setNewStream()
    // swaps the feed's underlying stream.
    NewStream = "new_stream",
}
export class CallFeed extends EventEmitter {
    /**
     * Wraps a single MediaStream belonging to one participant of a call,
     * together with enough context (owner, room, purpose) to interpret it.
     *
     * @param stream the underlying MediaStream of this feed
     * @param userId Matrix user ID of the feed's owner
     * @param purpose what the stream carries (usermedia or screenshare)
     * @param client client used to resolve the owner's RoomMember and to
     *     decide whether this feed is local
     * @param roomId ID of the room the call is taking place in
     */
    constructor(
        public stream: MediaStream,
        public userId: string,
        public purpose: SDPStreamMetadataPurpose,
        private client: MatrixClient,
        private roomId: string,
    ) {
        super();
    }

    /**
     * Returns callRoom member
     * @returns member of the callRoom
     */
    public getMember(): RoomMember {
        // NOTE(review): assumes the client knows this room — if getRoom()
        // returned null this would throw; confirm against callers.
        const callRoom = this.client.getRoom(this.roomId);
        return callRoom.getMember(this.userId);
    }

    /**
     * Returns true if CallFeed is local, otherwise returns false
     * @returns {boolean} is local?
     */
    public isLocal(): boolean {
        return this.userId === this.client.getUserId();
    }

    // TODO: The two following methods should be later replaced
    // by something that will also check if the remote is muted

    /**
     * Returns true if audio is muted or if there are no audio
     * tracks, otherwise returns false
     * @returns {boolean} is audio muted?
     */
    public isAudioMuted(): boolean {
        return this.stream.getAudioTracks().length === 0;
    }

    /**
     * Returns true if video is muted or if there are no video
     * tracks, otherwise returns false
     * @returns {boolean} is video muted?
     */
    public isVideoMuted(): boolean {
        // We assume only one video track
        return this.stream.getVideoTracks().length === 0;
    }

    /**
     * Replaces the current MediaStream with a new one.
     * This method should be only used by MatrixCall.
     * @param newStream new stream with which to replace the current one
     */
    public setNewStream(newStream: MediaStream) {
        this.stream = newStream;
        this.emit(CallFeedEvent.NewStream, this.stream);
    }
}

View File

@@ -3522,9 +3522,9 @@ hmac-drbg@^1.0.1:
minimalistic-crypto-utils "^1.0.1"
hosted-git-info@^2.1.4:
version "2.8.8"
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.8.tgz#7539bd4bc1e0e0a895815a2e0262420b12858488"
integrity sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==
version "2.8.9"
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9"
integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==
html-encoding-sniffer@^2.0.1:
version "2.0.1"