
Make SonarCloud happier (#2850)

* Make SonarCloud happier

* Revert one change due to lack of strict mode upstream

* Fix typo
Michael Telatynski
2022-11-07 12:16:48 +00:00
committed by GitHub
parent 52932f59ab
commit 6c543382e6
23 changed files with 102 additions and 152 deletions
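Most of the diff below applies the same few cleanups that SonarCloud flags: index-based for loops become for...of, hand-rolled search loops become Array.prototype.find / some with optional chaining, and redundant non-null assertions and commented-out debug logging are dropped. A minimal sketch of the before/after pattern, using hypothetical names rather than code from this commit:

// Illustrative sketch only; Rule, rules and findRule* are hypothetical names, not code from this commit.
interface Rule { ruleId: string; enabled: boolean; }

// Before: index-based loop performing a manual search.
function findRuleOld(rules: Rule[], ruleId: string): Rule | undefined {
    for (let i = 0; i < rules.length; i++) {
        const rule = rules[i];
        if (rule.ruleId === ruleId) {
            return rule;
        }
    }
    return undefined;
}

// After: the same behaviour with Array.prototype.find.
function findRuleNew(rules: Rule[], ruleId: string): Rule | undefined {
    return rules.find((rule) => rule.ruleId === ruleId);
}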

View File

@@ -169,8 +169,8 @@ export interface IPusher {
     lang: string;
     profile_tag?: string;
     pushkey: string;
-    enabled?: boolean | null | undefined;
-    "org.matrix.msc3881.enabled"?: boolean | null | undefined;
+    enabled?: boolean | null;
+    "org.matrix.msc3881.enabled"?: boolean | null;
     device_id?: string | null;
     "org.matrix.msc3881.device_id"?: string | null;
 }

View File

@@ -2930,7 +2930,7 @@ export class MatrixClient extends TypedEventEmitter<EmittedEvents, ClientEventHa
             throw new Error("End-to-end encryption disabled");
         }
-        const path = this.makeKeyBackupPath(roomId!, sessionId!, version!);
+        const path = this.makeKeyBackupPath(roomId!, sessionId!, version);
         await this.http.authedRequest(
             Method.Put, path.path, path.queryData, data,
             { prefix: ClientPrefix.V3 },
@@ -3284,7 +3284,7 @@ export class MatrixClient extends TypedEventEmitter<EmittedEvents, ClientEventHa
             throw new Error("End-to-end encryption disabled");
         }
-        const path = this.makeKeyBackupPath(roomId!, sessionId!, version!);
+        const path = this.makeKeyBackupPath(roomId!, sessionId!, version);
         await this.http.authedRequest(
             Method.Delete, path.path, path.queryData, undefined,
             { prefix: ClientPrefix.V3 },
@@ -4181,7 +4181,7 @@ export class MatrixClient extends TypedEventEmitter<EmittedEvents, ClientEventHa
         return this.sendEvent(
             roomId,
-            threadId as (string | null),
+            threadId as string | null,
             eventType,
             sendContent,
             txnId,
@@ -5225,7 +5225,7 @@ export class MatrixClient extends TypedEventEmitter<EmittedEvents, ClientEventHa
         ];
         // Here we handle non-thread timelines only, but still process any thread events to populate thread summaries.
-        let timeline = timelineSet.getTimelineForEvent(events[0].getId()!);
+        let timeline = timelineSet.getTimelineForEvent(events[0].getId());
         if (timeline) {
             timeline.getState(EventTimeline.BACKWARDS)!.setUnknownStateEvents(res.state.map(mapper));
         } else {
@@ -6115,15 +6115,7 @@ export class MatrixClient extends TypedEventEmitter<EmittedEvents, ClientEventHa
         // There can be only room-kind push rule per room
         // and its id is the room id.
         if (this.pushRules) {
-            if (!this.pushRules[scope] || !this.pushRules[scope].room) {
-                return;
-            }
-            for (let i = 0; i < this.pushRules[scope].room.length; i++) {
-                const rule = this.pushRules[scope].room[i];
-                if (rule.rule_id === roomId) {
-                    return rule;
-                }
-            }
+            return this.pushRules[scope]?.room?.find(rule => rule.rule_id === roomId);
         } else {
             throw new Error(
                 "SyncApi.sync() must be done before accessing to push rules.",
@@ -6473,8 +6465,6 @@ export class MatrixClient extends TypedEventEmitter<EmittedEvents, ClientEventHa
         // create a new filter
         const createdFilter = await this.createFilter(filter.getDefinition());
-        // debuglog("Created new filter ID %s: %s", createdFilter.filterId,
-        // JSON.stringify(createdFilter.getDefinition()));
         this.store.setFilterIdByName(filterName, createdFilter.filterId);
         return createdFilter.filterId!;
     }
@@ -8434,9 +8424,7 @@ export class MatrixClient extends TypedEventEmitter<EmittedEvents, ClientEventHa
             keyAlgorithm = "signed_curve25519";
         }
-        for (let i = 0; i < devices.length; ++i) {
-            const userId = devices[i][0];
-            const deviceId = devices[i][1];
+        for (const [userId, deviceId] of devices) {
             const query = queries[userId] || {};
             queries[userId] = query;
             query[deviceId] = keyAlgorithm;

View File

@@ -230,7 +230,7 @@ export class DeviceList extends TypedEventEmitter<EmittedEvents, CryptoEventHand
             }, delay);
         }
-        return savePromise!;
+        return savePromise;
     }
     /**

View File

@@ -804,7 +804,7 @@ export class OlmDevice {
             log,
         );
-        return info!;
+        return info;
     }
     /**

View File

@@ -119,12 +119,10 @@ class OlmEncryption extends EncryptionAlgorithm {
         const promises: Promise<void>[] = [];
-        for (let i = 0; i < users.length; ++i) {
-            const userId = users[i];
+        for (const userId of users) {
             const devices = this.crypto.getStoredDevicesForUser(userId) || [];
-            for (let j = 0; j < devices.length; ++j) {
-                const deviceInfo = devices[j];
+            for (const deviceInfo of devices) {
                 const key = deviceInfo.getIdentityKey();
                 if (key == this.olmDevice.deviceCurve25519Key) {
                     // don't bother sending to ourself
@@ -304,8 +302,7 @@ class OlmDecryption extends DecryptionAlgorithm {
         // try each session in turn.
         const decryptionErrors: Record<string, string> = {};
-        for (let i = 0; i < sessionIds.length; i++) {
-            const sessionId = sessionIds[i];
+        for (const sessionId of sessionIds) {
             try {
                 const payload = await this.olmDevice.decryptMessage(
                     theirDeviceIdentityKey, sessionId, message.type, message.body,

View File

@@ -680,8 +680,7 @@ export class Curve25519 implements BackupAlgorithm {
         const backupPubKey = decryption.init_with_private_key(privKey);
         if (backupPubKey !== this.authData.public_key) {
-            // eslint-disable-next-line no-throw-literal
-            throw { errcode: MatrixClient.RESTORE_BACKUP_ERROR_BAD_KEY };
+            throw new MatrixError({ errcode: MatrixClient.RESTORE_BACKUP_ERROR_BAD_KEY });
         }
         const keys: IMegolmSessionData[] = [];

View File

@@ -2379,9 +2379,8 @@ export class Crypto extends TypedEventEmitter<CryptoEvent, CryptoEventHandlerMap
      */
     public async getOlmSessionsForUser(userId: string): Promise<Record<string, IUserOlmSession>> {
         const devices = this.getStoredDevicesForUser(userId) || [];
-        const result = {};
-        for (let j = 0; j < devices.length; ++j) {
-            const device = devices[j];
+        const result: { [deviceId: string]: IUserOlmSession } = {};
+        for (const device of devices) {
             const deviceKey = device.getIdentityKey();
             const sessions = await this.olmDevice.getSessionInfoForDevice(deviceKey);
@@ -2682,14 +2681,11 @@ export class Crypto extends TypedEventEmitter<CryptoEvent, CryptoEventHandlerMap
     ): Promise<Record<string, Record<string, olmlib.IOlmSessionResult>>> {
         const devicesByUser: Record<string, DeviceInfo[]> = {};
-        for (let i = 0; i < users.length; ++i) {
-            const userId = users[i];
+        for (const userId of users) {
             devicesByUser[userId] = [];
             const devices = this.getStoredDevicesForUser(userId) || [];
-            for (let j = 0; j < devices.length; ++j) {
-                const deviceInfo = devices[j];
+            for (const deviceInfo of devices) {
                 const key = deviceInfo.getIdentityKey();
                 if (key == this.olmDevice.deviceCurve25519Key) {
                     // don't bother setting up session to ourself

View File

@@ -335,8 +335,7 @@ export async function ensureOlmSessionsForDevices(
     const promises: Promise<void>[] = [];
     for (const [userId, devices] of Object.entries(devicesByUser)) {
         const userRes = otkResult[userId] || {};
-        for (let j = 0; j < devices.length; j++) {
-            const deviceInfo = devices[j];
+        for (const deviceInfo of devices) {
             const deviceId = deviceInfo.deviceId;
             const key = deviceInfo.getIdentityKey();

View File

@@ -150,8 +150,7 @@ export class FilterComponent {
             },
         };
-        for (let n = 0; n < Object.keys(literalKeys).length; n++) {
-            const name = Object.keys(literalKeys)[n];
+        for (const name in literalKeys) {
             const matchFunc = literalKeys[name];
             const notName = "not_" + name;
             const disallowedValues: string[] = this.filterJson[notName];

View File

@@ -631,11 +631,6 @@ export class InteractiveAuth {
      */
     private firstUncompletedStage(flow: IFlow): AuthType | undefined {
         const completed = this.data.completed || [];
-        for (let i = 0; i < flow.stages.length; ++i) {
-            const stageType = flow.stages[i];
-            if (completed.indexOf(stageType) === -1) {
-                return stageType;
-            }
-        }
+        return flow.stages.find(stageType => !completed.includes(stageType));
     }
 }
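The room push-rule lookup and firstUncompletedStage rewrites above both lean on the same Array.prototype.find contract: it returns the first element satisfying the predicate, or undefined when nothing matches, and with optional chaining a missing array also yields undefined, so the deleted guard-and-loop code keeps its behaviour. A stand-alone sketch of that equivalence with hypothetical data, not repository code:

// Hypothetical stand-alone check of the ?. / find() behaviour.
type RoomRule = { rule_id: string };

function findRoomRule(rules: RoomRule[] | undefined, roomId: string): RoomRule | undefined {
    return rules?.find((rule) => rule.rule_id === roomId);
}

console.log(findRoomRule(undefined, "!a:example.org"));                       // undefined (no rule list)
console.log(findRoomRule([{ rule_id: "!b:example.org" }], "!a:example.org")); // undefined (no match)
console.log(findRoomRule([{ rule_id: "!a:example.org" }], "!a:example.org")); // the matching rule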

View File

@@ -457,8 +457,7 @@ export class EventTimelineSet extends TypedEventEmitter<EmittedEvents, EventTime
         let didUpdate = false;
         let lastEventWasNew = false;
-        for (let i = 0; i < events.length; i++) {
-            const event = events[i];
+        for (const event of events) {
             const eventId = event.getId()!;
             const existingTimeline = this._eventIdToTimeline.get(eventId);

View File

@@ -1008,8 +1008,8 @@ export class Room extends ReadReceipt<RoomEmittedEvents, RoomEventHandlerMap> {
      * Removing just the old live timeline whilst preserving previous ones is not supported.
      */
     public resetLiveTimeline(backPaginationToken?: string | null, forwardPaginationToken?: string | null): void {
-        for (let i = 0; i < this.timelineSets.length; i++) {
-            this.timelineSets[i].resetLiveTimeline(
+        for (const timelineSet of this.timelineSets) {
+            timelineSet.resetLiveTimeline(
                 backPaginationToken ?? undefined,
                 forwardPaginationToken ?? undefined,
             );
@@ -2128,8 +2128,8 @@ export class Room extends ReadReceipt<RoomEmittedEvents, RoomEventHandlerMap> {
         const { duplicateStrategy, timelineWasEmpty, fromCache } = addLiveEventOptions;
         // add to our timeline sets
-        for (let i = 0; i < this.timelineSets.length; i++) {
-            this.timelineSets[i].addLiveEvent(event, {
+        for (const timelineSet of this.timelineSets) {
+            timelineSet.addLiveEvent(event, {
                 duplicateStrategy,
                 fromCache,
                 timelineWasEmpty,
@@ -2214,8 +2214,7 @@ export class Room extends ReadReceipt<RoomEmittedEvents, RoomEventHandlerMap> {
                 }
             }
         } else {
-            for (let i = 0; i < this.timelineSets.length; i++) {
-                const timelineSet = this.timelineSets[i];
+            for (const timelineSet of this.timelineSets) {
                 if (timelineSet.getFilter()) {
                     if (timelineSet.getFilter()!.filterRoomTimeline([event]).length) {
                         timelineSet.addEventToTimeline(event,
@@ -2322,9 +2321,7 @@ export class Room extends ReadReceipt<RoomEmittedEvents, RoomEventHandlerMap> {
         thread?.timelineSet.handleRemoteEcho(localEvent, oldEventId, newEventId);
         if (shouldLiveInRoom) {
-            for (let i = 0; i < this.timelineSets.length; i++) {
-                const timelineSet = this.timelineSets[i];
+            for (const timelineSet of this.timelineSets) {
                 // if it's already in the timeline, update the timeline map. If it's not, add it.
                 timelineSet.handleRemoteEcho(localEvent, oldEventId, newEventId);
             }
@@ -2407,8 +2404,8 @@ export class Room extends ReadReceipt<RoomEmittedEvents, RoomEventHandlerMap> {
                 // if the event was already in the timeline (which will be the case if
                 // opts.pendingEventOrdering==chronological), we need to update the
                 // timeline map.
-                for (let i = 0; i < this.timelineSets.length; i++) {
-                    this.timelineSets[i].replaceEventId(oldEventId, newEventId!);
+                for (const timelineSet of this.timelineSets) {
+                    timelineSet.replaceEventId(oldEventId, newEventId!);
                 }
             }
         } else if (newStatus == EventStatus.CANCELLED) {
@@ -2643,8 +2640,8 @@ export class Room extends ReadReceipt<RoomEmittedEvents, RoomEventHandlerMap> {
      * @param {String[]} eventIds A list of eventIds to remove.
      */
     public removeEvents(eventIds: string[]): void {
-        for (let i = 0; i < eventIds.length; ++i) {
-            this.removeEvent(eventIds[i]);
+        for (const eventId of eventIds) {
+            this.removeEvent(eventId);
         }
     }
@@ -2657,8 +2654,8 @@ export class Room extends ReadReceipt<RoomEmittedEvents, RoomEventHandlerMap> {
      */
     public removeEvent(eventId: string): boolean {
         let removedAny = false;
-        for (let i = 0; i < this.timelineSets.length; i++) {
-            const removed = this.timelineSets[i].removeEvent(eventId);
+        for (const timelineSet of this.timelineSets) {
+            const removed = timelineSet.removeEvent(eventId);
             if (removed) {
                 if (removed.isRedaction()) {
                     this.revertRedactionLocalEcho(removed);
@@ -2740,8 +2737,7 @@ export class Room extends ReadReceipt<RoomEmittedEvents, RoomEventHandlerMap> {
      * @param {Array<MatrixEvent>} events an array of account_data events to add
      */
     public addAccountData(events: MatrixEvent[]): void {
-        for (let i = 0; i < events.length; i++) {
-            const event = events[i];
+        for (const event of events) {
             if (event.getType() === "m.tag") {
                 this.addTags(event);
             }

View File

@@ -134,8 +134,8 @@ export class User extends TypedEventEmitter<UserEvent, UserEventHandlerMap> {
         this.updateModifiedTime();
-        for (let i = 0; i < eventsToFire.length; i++) {
-            this.emit(eventsToFire[i], event, this);
+        for (const eventToFire of eventsToFire) {
+            this.emit(eventToFire, event, this);
         }
     }

View File

@@ -135,8 +135,7 @@ export class PushProcessor {
      */
     public static actionListToActionsObject(actionList: PushRuleAction[]): IActionsObject {
         const actionObj: IActionsObject = { notify: false, tweaks: {} };
-        for (let i = 0; i < actionList.length; ++i) {
-            const action = actionList[i];
+        for (const action of actionList) {
             if (action === PushRuleActionName.Notify) {
                 actionObj.notify = true;
             } else if (typeof action === 'object') {
@@ -190,15 +189,13 @@ export class PushProcessor {
     private static cachedGlobToRegex: Record<string, RegExp> = {}; // $glob: RegExp
     private matchingRuleFromKindSet(ev: MatrixEvent, kindset: PushRuleSet): IAnnotatedPushRule | null {
-        for (let ruleKindIndex = 0; ruleKindIndex < RULEKINDS_IN_ORDER.length; ++ruleKindIndex) {
-            const kind = RULEKINDS_IN_ORDER[ruleKindIndex];
+        for (const kind of RULEKINDS_IN_ORDER) {
             const ruleset = kindset[kind];
             if (!ruleset) {
                 continue;
             }
-            for (let ruleIndex = 0; ruleIndex < ruleset.length; ++ruleIndex) {
-                const rule = ruleset[ruleIndex];
+            for (const rule of ruleset) {
                 if (!rule.enabled) {
                     continue;
                 }
@@ -478,16 +475,7 @@ export class PushProcessor {
     }
     public ruleMatchesEvent(rule: Partial<IPushRule> & Pick<IPushRule, "conditions">, ev: MatrixEvent): boolean {
-        if (!rule.conditions?.length) return true;
-        let ret = true;
-        for (let i = 0; i < rule.conditions.length; ++i) {
-            const cond = rule.conditions[i];
-            // @ts-ignore
-            ret &= this.eventFulfillsCondition(cond, ev);
-        }
-        //console.log("Rule "+rule.rule_id+(ret ? " matches" : " doesn't match"));
-        return ret;
+        return !rule.conditions?.some(cond => !this.eventFulfillsCondition(cond, ev));
     }
     /**
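The ruleMatchesEvent one-liner above relies on two facts: by De Morgan, !conditions.some(c => !ok(c)) is the same as conditions.every(ok), and when conditions is undefined or empty the expression evaluates to true, matching the deleted `if (!rule.conditions?.length) return true;` fast path. A hedged illustration with a hypothetical predicate, not repository code:

// Hypothetical predicate standing in for eventFulfillsCondition.
const fulfils = (cond: { pass: boolean }): boolean => cond.pass;

function ruleMatches(conditions?: Array<{ pass: boolean }>): boolean {
    return !conditions?.some((cond) => !fulfils(cond));
}

console.log(ruleMatches(undefined));                         // true: no conditions means the rule matches
console.log(ruleMatches([]));                                // true: [].some(...) is false
console.log(ruleMatches([{ pass: true }, { pass: false }])); // false: one unfulfilled condition fails the rule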

View File

@@ -101,7 +101,7 @@ export function clearTimeout(key: number): void {
     }
     // remove the element from the list
-    let i;
+    let i: number;
     for (i = 0; i < callbackList.length; i++) {
         const cb = callbackList[i];
         if (cb.key == key) {
@@ -137,7 +137,6 @@ function scheduleRealCallback(): void {
 }
 function runCallbacks(): void {
-    let cb: Callback;
     const timestamp = Date.now();
     debuglog("runCallbacks: now:", timestamp);
@@ -149,7 +148,7 @@ function runCallbacks(): void {
         if (!first || first.runAt > timestamp) {
             break;
         }
-        cb = callbackList.shift()!;
+        const cb = callbackList.shift()!;
         debuglog("runCallbacks: popping", cb.key);
         callbacksToRun.push(cb);
     }
@@ -159,8 +158,7 @@ function runCallbacks(): void {
     // register their own setTimeouts.
     scheduleRealCallback();
-    for (let i = 0; i < callbacksToRun.length; i++) {
-        cb = callbacksToRun[i];
+    for (const cb of callbacksToRun) {
         try {
             cb.func.apply(global, cb.params);
         } catch (e) {

View File

@@ -16,14 +16,13 @@ limitations under the License.
 import { UnstableValue } from "matrix-events-sdk";
-import { RendezvousChannel } from ".";
+import { RendezvousChannel, RendezvousFailureListener, RendezvousFailureReason, RendezvousIntent } from ".";
 import { MatrixClient } from "../client";
 import { CrossSigningInfo } from "../crypto/CrossSigning";
 import { DeviceInfo } from "../crypto/deviceinfo";
 import { buildFeatureSupportMap, Feature, ServerSupport } from "../feature";
 import { logger } from "../logger";
 import { sleep } from "../utils";
-import { RendezvousFailureListener, RendezvousFailureReason, RendezvousIntent } from ".";
 enum PayloadType {
     Start = 'm.login.start',

View File

@@ -454,8 +454,8 @@ export class LocalIndexedDBStoreBackend implements IIndexedDBBackend {
         return utils.promiseTry<void>(() => {
             const txn = this.db!.transaction(["accountData"], "readwrite");
             const store = txn.objectStore("accountData");
-            for (let i = 0; i < accountData.length; i++) {
-                store.put(accountData[i]); // put == UPSERT
+            for (const event of accountData) {
+                store.put(event); // put == UPSERT
             }
             return txnAsPromise(txn).then();
         });

View File

@@ -400,47 +400,51 @@ export class SyncAccumulator {
             acc[INVITED_COUNT_KEY] = sum[INVITED_COUNT_KEY] || acc[INVITED_COUNT_KEY];
         }
-        if (data.ephemeral && data.ephemeral.events) {
-            data.ephemeral.events.forEach((e) => {
-                // We purposefully do not persist m.typing events.
-                // Technically you could refresh a browser before the timer on a
-                // typing event is up, so it'll look like you aren't typing when
-                // you really still are. However, the alternative is worse. If
-                // we do persist typing events, it will look like people are
-                // typing forever until someone really does start typing (which
-                // will prompt Synapse to send down an actual m.typing event to
-                // clobber the one we persisted).
-                if (e.type !== EventType.Receipt || !e.content) {
-                    // This means we'll drop unknown ephemeral events but that
-                    // seems okay.
-                    return;
-                }
-                // Handle m.receipt events. They clobber based on:
-                //   (user_id, receipt_type)
-                // but they are keyed in the event as:
-                //   content:{ $event_id: { $receipt_type: { $user_id: {json} }}}
-                // so store them in the former so we can accumulate receipt deltas
-                // quickly and efficiently (we expect a lot of them). Fold the
-                // receipt type into the key name since we only have 1 at the
-                // moment (m.read) and nested JSON objects are slower and more
-                // of a hassle to work with. We'll inflate this back out when
-                // getJSON() is called.
-                Object.keys(e.content).forEach((eventId) => {
-                    Object.entries(e.content[eventId]).forEach(([key, value]) => {
-                        if (!isSupportedReceiptType(key)) return;
-                        Object.keys(value!).forEach((userId) => {
-                            // clobber on user ID
-                            currentData._readReceipts[userId] = {
-                                data: e.content[eventId][key][userId],
-                                type: key as ReceiptType,
-                                eventId: eventId,
-                            };
-                        });
-                    });
-                });
-            });
-        }
+        data.ephemeral?.events?.forEach((e) => {
+            // We purposefully do not persist m.typing events.
+            // Technically you could refresh a browser before the timer on a
+            // typing event is up, so it'll look like you aren't typing when
+            // you really still are. However, the alternative is worse. If
+            // we do persist typing events, it will look like people are
+            // typing forever until someone really does start typing (which
+            // will prompt Synapse to send down an actual m.typing event to
+            // clobber the one we persisted).
+            if (e.type !== EventType.Receipt || !e.content) {
+                // This means we'll drop unknown ephemeral events but that
+                // seems okay.
+                return;
+            }
+            // Handle m.receipt events. They clobber based on:
+            //   (user_id, receipt_type)
+            // but they are keyed in the event as:
+            //   content:{ $event_id: { $receipt_type: { $user_id: {json} }}}
+            // so store them in the former so we can accumulate receipt deltas
+            // quickly and efficiently (we expect a lot of them). Fold the
+            // receipt type into the key name since we only have 1 at the
+            // moment (m.read) and nested JSON objects are slower and more
+            // of a hassle to work with. We'll inflate this back out when
+            // getJSON() is called.
+            Object.keys(e.content).forEach((eventId) => {
+                Object.entries<{
+                    [eventId: string]: {
+                        [receiptType: string]: {
+                            [userId: string]: IMinimalEvent;
+                        };
+                    };
+                }>(e.content[eventId]).forEach(([key, value]) => {
+                    if (!isSupportedReceiptType(key)) return;
+                    Object.keys(value).forEach((userId) => {
+                        // clobber on user ID
+                        currentData._readReceipts[userId] = {
+                            data: e.content[eventId][key][userId],
+                            type: key as ReceiptType,
+                            eventId: eventId,
+                        };
+                    });
+                });
+            });
+        });
         // if we got a limited sync, we need to remove all timeline entries or else
         // we will have gaps in the timeline.
@@ -551,7 +555,7 @@ export class SyncAccumulator {
         };
         // Add account data
         Object.keys(roomData._accountData).forEach((evType) => {
-            roomJson.account_data.events.push(roomData._accountData[evType] as IMinimalEvent);
+            roomJson.account_data.events.push(roomData._accountData[evType]);
         });
         // Add receipt data

View File

@@ -821,7 +821,6 @@ export class SyncApi {
         let data: ISyncResponse;
         try {
-            //debuglog('Starting sync since=' + syncToken);
             if (!this.currentSyncRequest) {
                 this.currentSyncRequest = this.doSyncRequest(syncOptions, syncToken);
             }
@@ -834,8 +833,6 @@ export class SyncApi {
             this.currentSyncRequest = undefined;
         }
-        //debuglog('Completed sync, next_batch=' + data.next_batch);
         // set the sync token NOW *before* processing the events. We do this so
         // if something barfs on an event we can skip it rather than constantly
         // polling with the same token.
@@ -1747,11 +1744,10 @@ export class SyncApi {
     private processEventsForNotifs(room: Room, timelineEventList: MatrixEvent[]): void {
         // gather our notifications into this.notifEvents
         if (this.client.getNotifTimelineSet()) {
-            for (let i = 0; i < timelineEventList.length; i++) {
-                const pushActions = this.client.getPushActionsForEvent(timelineEventList[i]);
-                if (pushActions && pushActions.notify &&
-                    pushActions.tweaks && pushActions.tweaks.highlight) {
-                    this.notifEvents.push(timelineEventList[i]);
+            for (const event of timelineEventList) {
+                const pushActions = this.client.getPushActionsForEvent(event);
+                if (pushActions?.notify && pushActions.tweaks?.highlight) {
+                    this.notifEvents.push(event);
                 }
             }
         }

View File

@@ -190,9 +190,9 @@ export function isFunction(value: any) {
  */
 // note using 'keys' here would shadow the 'keys' function defined above
 export function checkObjectHasKeys(obj: object, keys: string[]) {
-    for (let i = 0; i < keys.length; i++) {
-        if (!obj.hasOwnProperty(keys[i])) {
-            throw new Error("Missing required key: " + keys[i]);
+    for (const key of keys) {
+        if (!obj.hasOwnProperty(key)) {
+            throw new Error("Missing required key: " + key);
         }
     }
 }

View File

@@ -2237,8 +2237,8 @@ export class MatrixCall extends TypedEventEmitter<CallEvent, CallEventHandlerMap
 }
 function setTracksEnabled(tracks: Array<MediaStreamTrack>, enabled: boolean): void {
-    for (let i = 0; i < tracks.length; i++) {
-        tracks[i].enabled = enabled;
+    for (const track of tracks) {
+        track.enabled = enabled;
     }
 }

View File

@@ -140,7 +140,6 @@ export class CallEventHandler {
         const type = event.getType() as EventType;
         const weSentTheEvent = event.getSender() === this.client.credentials.userId;
         let call = content.call_id ? this.calls.get(content.call_id) : undefined;
-        //console.info("RECV %s content=%s", type, JSON.stringify(content));
         if (type === EventType.CallInvite) {
             // ignore invites you send

View File

@@ -220,9 +220,9 @@ export class CallFeed extends TypedEventEmitter<CallFeedEvent, EventHandlerMap>
         this.analyser.getFloatFrequencyData(this.frequencyBinCount!);
         let maxVolume = -Infinity;
-        for (let i = 0; i < this.frequencyBinCount!.length; i++) {
-            if (this.frequencyBinCount![i] > maxVolume) {
-                maxVolume = this.frequencyBinCount![i];
+        for (const volume of this.frequencyBinCount!) {
+            if (volume > maxVolume) {
+                maxVolume = volume;
             }
         }
@@ -233,9 +233,7 @@ export class CallFeed extends TypedEventEmitter<CallFeedEvent, EventHandlerMap>
         let newSpeaking = false;
-        for (let i = 0; i < this.speakingVolumeSamples.length; i++) {
-            const volume = this.speakingVolumeSamples[i];
+        for (const volume of this.speakingVolumeSamples) {
             if (volume > this.speakingThreshold) {
                 newSpeaking = true;
                 break;