From e323d917a418ab0f9e117b691c673cb3984d8a26 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Fri, 15 Mar 2019 14:06:28 -0600 Subject: [PATCH 001/159] Support default push rules for when servers are outdated --- src/pushprocessor.js | 29 ++++++++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/src/pushprocessor.js b/src/pushprocessor.js index f7ee4721f..b5a50121d 100644 --- a/src/pushprocessor.js +++ b/src/pushprocessor.js @@ -23,6 +23,12 @@ import {escapeRegExp, globToRegexp} from "./utils"; const RULEKINDS_IN_ORDER = ['override', 'content', 'room', 'sender', 'underride']; +// The default override rules to apply when calculating actions for an event. These +// defaults apply under no other circumstances to avoid confusing the client with server +// state. +const DEFAULT_OVERRIDE_RULES = [ +]; + /** * Construct a Push Processor. * @constructor @@ -312,6 +318,26 @@ function PushProcessor(client) { return actionObj; }; + const applyRuleDefaults = function(clientRuleset) { + // Deep clone the object before we mutate it + const ruleset = JSON.parse(JSON.stringify(clientRuleset)); + + if (!clientRuleset['global']) clientRuleset['global'] = {}; + if (!clientRuleset['global']['override']) clientRuleset['global']['override'] = []; + + // Apply default overrides + const globalOverrides = clientRuleset['global']['override']; + for (const override of DEFAULT_OVERRIDE_RULES) { + const existingRule = globalOverrides.find((r) => r.rule_id === override.rule_id); + if (!existingRule) { + console.warn(`Adding global override for ${override.rule_id} because is is missing`); + globalOverrides.push(override); + } + } + + return ruleset; + }; + this.ruleMatchesEvent = function(rule, ev) { let ret = true; for (let i = 0; i < rule.conditions.length; ++i) { @@ -331,7 +357,8 @@ function PushProcessor(client) { * @return {PushAction} */ this.actionsForEvent = function(ev) { - return pushActionsForEventAndRulesets(ev, client.pushRules); + const rules = applyRuleDefaults(client.pushRules); + return pushActionsForEventAndRulesets(ev, rules); }; /** From b3d2d39b60e8e1b3820fc06cc0a20d6fb4470705 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Fri, 15 Mar 2019 14:07:15 -0600 Subject: [PATCH 002/159] Add tombstone rule as a default rule in support of MSC1930 Part of https://github.com/vector-im/riot-web/issues/8447 See also https://github.com/matrix-org/matrix-doc/pull/1930 --- src/pushprocessor.js | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/src/pushprocessor.js b/src/pushprocessor.js index b5a50121d..675db1895 100644 --- a/src/pushprocessor.js +++ b/src/pushprocessor.js @@ -27,6 +27,26 @@ const RULEKINDS_IN_ORDER = ['override', 'content', 'room', 'sender', 'underride' // defaults apply under no other circumstances to avoid confusing the client with server // state. 
const DEFAULT_OVERRIDE_RULES = [ + { + // For homeservers which don't support MSC1930 yet + rule_id: ".m.rule.tombstone", + default: true, + enabled: true, + conditions: [ + { + kind: "event_match", + key: "type", + pattern: "m.room.tombstone", + }, + ], + actions: [ + "notify", + { + set_tweak: "highlight", + value: true, + }, + ], + }, ]; /** From 42f181cc7bad956a19f2d7d23054543d57351b7d Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Fri, 15 Mar 2019 14:11:29 -0600 Subject: [PATCH 003/159] Appease the linter --- src/pushprocessor.js | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/src/pushprocessor.js b/src/pushprocessor.js index 675db1895..5bb6699d9 100644 --- a/src/pushprocessor.js +++ b/src/pushprocessor.js @@ -342,15 +342,22 @@ function PushProcessor(client) { // Deep clone the object before we mutate it const ruleset = JSON.parse(JSON.stringify(clientRuleset)); - if (!clientRuleset['global']) clientRuleset['global'] = {}; - if (!clientRuleset['global']['override']) clientRuleset['global']['override'] = []; + if (!clientRuleset['global']) { + clientRuleset['global'] = {}; + } + if (!clientRuleset['global']['override']) { + clientRuleset['global']['override'] = []; + } // Apply default overrides const globalOverrides = clientRuleset['global']['override']; for (const override of DEFAULT_OVERRIDE_RULES) { - const existingRule = globalOverrides.find((r) => r.rule_id === override.rule_id); + const existingRule = globalOverrides + .find((r) => r.rule_id === override.rule_id); + if (!existingRule) { - console.warn(`Adding global override for ${override.rule_id} because is is missing`); + const ruleId = override.rule_id; + console.warn(`Adding default global override for ${ruleId}`); globalOverrides.push(override); } } From f90c91dded718c70fb0d9d4ea7099d8c0496dd3f Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 1 Apr 2019 18:43:48 -0600 Subject: [PATCH 004/159] Refuse to link live timelines into the forwards/backwards position See https://github.com/vector-im/riot-web/issues/8593#issuecomment-478681816 Previously (https://github.com/matrix-org/matrix-js-sdk/pull/873) we allowed half-linking timelines to each other if they satisfy the conditions, however this appears to not be helping. Instead, it seems like the timelines are getting stuck in a position where one direction is spliced but the other is broken. To avoid this case, we'll just avoid splicing in both directions when one of the directions is invalid. --- src/models/event-timeline-set.js | 37 ++++++++++++++++++-------------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/src/models/event-timeline-set.js b/src/models/event-timeline-set.js index cd199be2e..9675f7300 100644 --- a/src/models/event-timeline-set.js +++ b/src/models/event-timeline-set.js @@ -413,25 +413,30 @@ EventTimelineSet.prototype.addEventsToTimeline = function(events, toStartOfTimel const existingIsLive = existingTimeline === this._liveTimeline; const timelineIsLive = timeline === this._liveTimeline; - if (direction === EventTimeline.BACKWARDS && existingIsLive) { + const backwardsIsLive = direction === EventTimeline.BACKWARDS && existingIsLive; + const forwardsIsLive = direction === EventTimeline.FORWARDS && timelineIsLive; + + if (backwardsIsLive || forwardsIsLive) { // The live timeline should never be spliced into a non-live position. 
- console.warn( - "Refusing to set a preceding existingTimeLine on our " + - "timeline as the existingTimeLine is live (" + existingTimeline + ")", - ); - } else { - timeline.setNeighbouringTimeline(existingTimeline, direction); + // We use independent logging to better discover the problem at a glance. + console.warn({backwardsIsLive, forwardsIsLive}); // debugging + if (backwardsIsLive) { + console.warn( + "Refusing to set a preceding existingTimeLine on our " + + "timeline as the existingTimeLine is live (" + existingTimeline + ")", + ); + } + if (forwardsIsLive) { + console.warn( + "Refusing to set our preceding timeline on a existingTimeLine " + + "as our timeline is live (" + timeline + ")", + ); + } + continue; // abort splicing - try next event } - if (inverseDirection === EventTimeline.BACKWARDS && timelineIsLive) { - // The live timeline should never be spliced into a non-live position. - console.warn( - "Refusing to set our preceding timeline on a existingTimeLine " + - "as our timeline is live (" + timeline + ")", - ); - } else { - existingTimeline.setNeighbouringTimeline(timeline, inverseDirection); - } + timeline.setNeighbouringTimeline(existingTimeline, direction); + existingTimeline.setNeighbouringTimeline(timeline, inverseDirection); timeline = existingTimeline; didUpdate = true; From d1e64d0cfb043a24a558f2ab926fa37a30f18f1f Mon Sep 17 00:00:00 2001 From: Hubert Chathi Date: Tue, 2 Apr 2019 23:22:36 -0400 Subject: [PATCH 005/159] support both the incorrect and correct MAC methods also do some refactoring to make it easier to support choices in the other methods in the future --- package.json | 2 +- spec/unit/crypto/verification/sas.spec.js | 218 +++++++++++++--------- src/crypto/verification/SAS.js | 117 +++++++----- yarn.lock | 6 +- 4 files changed, 203 insertions(+), 140 deletions(-) diff --git a/package.json b/package.json index 04e297877..b48cfd423 100644 --- a/package.json +++ b/package.json @@ -82,7 +82,7 @@ "matrix-mock-request": "^1.2.2", "mocha": "^5.2.0", "mocha-jenkins-reporter": "^0.4.0", - "olm": "https://matrix.org/packages/npm/olm/olm-3.1.0-pre1.tgz", + "olm": "https://matrix.org/packages/npm/olm/olm-3.1.0-pre3.tgz", "rimraf": "^2.5.4", "source-map-support": "^0.4.11", "sourceify": "^0.1.0", diff --git a/spec/unit/crypto/verification/sas.spec.js b/spec/unit/crypto/verification/sas.spec.js index c1c1d2a45..902d48083 100644 --- a/spec/unit/crypto/verification/sas.spec.js +++ b/spec/unit/crypto/verification/sas.spec.js @@ -58,105 +58,143 @@ describe("SAS verification", function() { expect(spy).toHaveBeenCalled(); }); - it("should verify a key", async function() { - const [alice, bob] = await makeTestClients( - [ - {userId: "@alice:example.com", deviceId: "Osborne2"}, - {userId: "@bob:example.com", deviceId: "Dynabook"}, - ], - { - verificationMethods: [verificationMethods.SAS], - }, - ); - - alice.setDeviceVerified = expect.createSpy(); - alice.getDeviceEd25519Key = () => { - return "alice+base64+ed25519+key"; - }; - alice.getStoredDevice = () => { - return DeviceInfo.fromStorage( - { - keys: { - "ed25519:Dynabook": "bob+base64+ed25519+key", - }, - }, - "Dynabook", - ); - }; - alice.downloadKeys = () => { - return Promise.resolve(); - }; - - bob.setDeviceVerified = expect.createSpy(); - bob.getStoredDevice = () => { - return DeviceInfo.fromStorage( - { - keys: { - "ed25519:Osborne2": "alice+base64+ed25519+key", - }, - }, - "Osborne2", - ); - }; - bob.getDeviceEd25519Key = () => { - return "bob+base64+ed25519+key"; - }; - bob.downloadKeys = () => { - return 
Promise.resolve(); - }; - + describe("verification", function() { + let alice; + let bob; let aliceSasEvent; let bobSasEvent; + let aliceVerifier; + let bobPromise; - const bobPromise = new Promise((resolve, reject) => { - bob.on("crypto.verification.start", (verifier) => { - verifier.on("show_sas", (e) => { - if (!e.sas.emoji || !e.sas.decimal) { - e.cancel(); - } else if (!aliceSasEvent) { - bobSasEvent = e; - } else { - try { - expect(e.sas).toEqual(aliceSasEvent.sas); - e.confirm(); - aliceSasEvent.confirm(); - } catch (error) { - e.mismatch(); - aliceSasEvent.mismatch(); + beforeEach(async function() { + [alice, bob] = await makeTestClients( + [ + {userId: "@alice:example.com", deviceId: "Osborne2"}, + {userId: "@bob:example.com", deviceId: "Dynabook"}, + ], + { + verificationMethods: [verificationMethods.SAS], + }, + ); + + alice.setDeviceVerified = expect.createSpy(); + alice.getDeviceEd25519Key = () => { + return "alice+base64+ed25519+key"; + }; + alice.getStoredDevice = () => { + return DeviceInfo.fromStorage( + { + keys: { + "ed25519:Dynabook": "bob+base64+ed25519+key", + }, + }, + "Dynabook", + ); + }; + alice.downloadKeys = () => { + return Promise.resolve(); + }; + + bob.setDeviceVerified = expect.createSpy(); + bob.getStoredDevice = () => { + return DeviceInfo.fromStorage( + { + keys: { + "ed25519:Osborne2": "alice+base64+ed25519+key", + }, + }, + "Osborne2", + ); + }; + bob.getDeviceEd25519Key = () => { + return "bob+base64+ed25519+key"; + }; + bob.downloadKeys = () => { + return Promise.resolve(); + }; + + aliceSasEvent = null; + bobSasEvent = null; + + bobPromise = new Promise((resolve, reject) => { + bob.on("crypto.verification.start", (verifier) => { + verifier.on("show_sas", (e) => { + if (!e.sas.emoji || !e.sas.decimal) { + e.cancel(); + } else if (!aliceSasEvent) { + bobSasEvent = e; + } else { + try { + expect(e.sas).toEqual(aliceSasEvent.sas); + e.confirm(); + aliceSasEvent.confirm(); + } catch (error) { + e.mismatch(); + aliceSasEvent.mismatch(); + } } - } + }); + resolve(verifier); }); - resolve(verifier); + }); + + aliceVerifier = alice.beginKeyVerification( + verificationMethods.SAS, bob.getUserId(), bob.deviceId, + ); + aliceVerifier.on("show_sas", (e) => { + if (!e.sas.emoji || !e.sas.decimal) { + e.cancel(); + } else if (!bobSasEvent) { + aliceSasEvent = e; + } else { + try { + expect(e.sas).toEqual(bobSasEvent.sas); + e.confirm(); + bobSasEvent.confirm(); + } catch (error) { + e.mismatch(); + bobSasEvent.mismatch(); + } + } }); }); - const aliceVerifier = alice.beginKeyVerification( - verificationMethods.SAS, bob.getUserId(), bob.deviceId, - ); - aliceVerifier.on("show_sas", (e) => { - if (!e.sas.emoji || !e.sas.decimal) { - e.cancel(); - } else if (!bobSasEvent) { - aliceSasEvent = e; - } else { - try { - expect(e.sas).toEqual(bobSasEvent.sas); - e.confirm(); - bobSasEvent.confirm(); - } catch (error) { - e.mismatch(); - bobSasEvent.mismatch(); - } - } + it("should verify a key", async function() { + await Promise.all([ + aliceVerifier.verify(), + bobPromise.then((verifier) => verifier.verify()), + ]); + expect(alice.setDeviceVerified) + .toHaveBeenCalledWith(bob.getUserId(), bob.deviceId); + expect(bob.setDeviceVerified) + .toHaveBeenCalledWith(alice.getUserId(), alice.deviceId); + }); + + it("should be able to verify using the old MAC", async function() { + // pretend that Alice can only understand the old (incorrect) MAC, + // and make sure that she can still verify with Bob + const origSendToDevice = alice.sendToDevice; + alice.sendToDevice = 
function(type, map) { + if (type === "m.key.verification.start") { + // Note: this modifies not only the message that Bob + // receives, but also the copy of the message that Alice + // has, since it is the same object. If this does not + // happen, the verification will fail due to a hash + // commitment mismatch. + map[bob.getUserId()][bob.deviceId] + .message_authentication_codes = ['hmac-sha256']; + } + return origSendToDevice.call(this, type, map); + }; + await Promise.all([ + aliceVerifier.verify(), + bobPromise.then((verifier) => verifier.verify()), + ]); + expect(alice.setDeviceVerified) + .toHaveBeenCalledWith(bob.getUserId(), bob.deviceId); + expect(bob.setDeviceVerified) + .toHaveBeenCalledWith(alice.getUserId(), alice.deviceId); }); - await Promise.all([ - aliceVerifier.verify(), - bobPromise.then((verifier) => verifier.verify()), - ]); - expect(alice.setDeviceVerified) - .toHaveBeenCalledWith(bob.getUserId(), bob.deviceId); - expect(bob.setDeviceVerified) - .toHaveBeenCalledWith(alice.getUserId(), alice.deviceId); }); it("should send a cancellation message on error", async function() { diff --git a/src/crypto/verification/SAS.js b/src/crypto/verification/SAS.js index 329d13fea..e17729b04 100644 --- a/src/crypto/verification/SAS.js +++ b/src/crypto/verification/SAS.js @@ -143,17 +143,44 @@ function generateEmojiSas(sasBytes) { return emojis.map((num) => emojiMapping[num]); } +const sasGenerators = { + decimal: generateDecimalSas, + emoji: generateEmojiSas, +}; + function generateSas(sasBytes, methods) { const sas = {}; - if (methods.includes("decimal")) { - sas["decimal"] = generateDecimalSas(sasBytes); - } - if (methods.includes("emoji")) { - sas["emoji"] = generateEmojiSas(sasBytes); + for (const method of methods) { + if (method in sasGenerators) { + sas[method] = sasGenerators[method](sasBytes); + } } return sas; } +const macMethods = { + "hmac-sha256-bits": "calculate_mac", + "hmac-sha256": "calculate_mac_long_kdf", +}; + +/* lists of algorithms/methods that are supported. The key agreement, hashes, + * and MAC lists should be sorted in order of preference (most preferred + * first). + */ +const KEY_AGREEMENT_LIST = ["curve25519"]; +const HASHES_LIST = ["sha256"]; +const MAC_LIST = ["hmac-sha256-bits", "hmac-sha256"]; +const SAS_LIST = Object.keys(sasGenerators); + +const KEY_AGREEMENT_SET = new Set(KEY_AGREEMENT_LIST); +const HASHES_SET = new Set(HASHES_LIST); +const MAC_SET = new Set(MAC_LIST); +const SAS_SET = new Set(SAS_LIST); + +function intersection(anArray, aSet) { + return anArray instanceof Array ? 
anArray.filter(x => aSet.has(x)) : []; +} + /** * @alias module:crypto/verification/SAS * @extends {module:crypto/verification/Base} @@ -181,11 +208,11 @@ export default class SAS extends Base { const initialMessage = { method: SAS.NAME, from_device: this._baseApis.deviceId, - key_agreement_protocols: ["curve25519"], - hashes: ["sha256"], - message_authentication_codes: ["hmac-sha256"], + key_agreement_protocols: KEY_AGREEMENT_LIST, + hashes: HASHES_LIST, + message_authentication_codes: MAC_LIST, // FIXME: allow app to specify what SAS methods can be used - short_authentication_string: ["decimal", "emoji"], + short_authentication_string: SAS_LIST, transaction_id: this.transactionId, }; this._sendToDevice("m.key.verification.start", initialMessage); @@ -193,19 +220,19 @@ export default class SAS extends Base { let e = await this._waitForEvent("m.key.verification.accept"); let content = e.getContent(); - if (!(content.key_agreement_protocol === "curve25519" - && content.hash === "sha256" - && content.message_authentication_code === "hmac-sha256" - && content.short_authentication_string instanceof Array - && (content.short_authentication_string.includes("decimal") - || content.short_authentication_string.includes("emoji")))) { + const sasMethods + = intersection(content.short_authentication_string, SAS_SET); + if (!(KEY_AGREEMENT_SET.has(content.key_agreement_protocol) + && HASHES_SET.has(content.hash) + && MAC_SET.has(content.message_authentication_code) + && sasMethods.length)) { throw newUnknownMethodError(); } if (typeof content.commitment !== "string") { throw newInvalidMessageError(); } + const macMethod = content.message_authentication_code; const hashCommitment = content.commitment; - const sasMethods = content.short_authentication_string; const olmSAS = new global.Olm.SAS(); try { this._sendToDevice("m.key.verification.key", { @@ -217,6 +244,7 @@ export default class SAS extends Base { // FIXME: make sure event is properly formed content = e.getContent(); const commitmentStr = content.key + anotherjson.stringify(initialMessage); + // TODO: use selected hash function (when we support multiple) if (olmutil.sha256(commitmentStr) !== hashCommitment) { throw newMismatchedCommitmentError(); } @@ -231,7 +259,7 @@ export default class SAS extends Base { this.emit("show_sas", { sas: generateSas(sasBytes, sasMethods), confirm: () => { - this._sendMAC(olmSAS); + this._sendMAC(olmSAS, macMethod); resolve(); }, cancel: () => reject(newUserCancelledError()), @@ -245,7 +273,7 @@ export default class SAS extends Base { verifySAS, ]); content = e.getContent(); - await this._checkMAC(olmSAS, content); + await this._checkMAC(olmSAS, content, macMethod); } finally { olmSAS.free(); } @@ -253,34 +281,31 @@ export default class SAS extends Base { async _doRespondVerification() { let content = this.startEvent.getContent(); - if (!(content.key_agreement_protocols instanceof Array - && content.key_agreement_protocols.includes("curve25519") - && content.hashes instanceof Array - && content.hashes.includes("sha256") - && content.message_authentication_codes instanceof Array - && content.message_authentication_codes.includes("hmac-sha256") - && content.short_authentication_string instanceof Array - && (content.short_authentication_string.includes("decimal") - || content.short_authentication_string.includes("emoji")))) { + const keyAgreement + = intersection(content.key_agreement_protocols, KEY_AGREEMENT_SET)[0]; + const hashMethod + = intersection(content.hashes, HASHES_SET)[0]; + const macMethod + = 
intersection(content.message_authentication_codes, MAC_SET)[0]; + // FIXME: allow app to specify what SAS methods can be used + const sasMethods + = intersection(content.short_authentication_string, SAS_SET); + if (!(keyAgreement !== undefined + && hashMethod !== undefined + && macMethod !== undefined + && sasMethods.length)) { throw newUnknownMethodError(); } const olmSAS = new global.Olm.SAS(); - const sasMethods = []; - // FIXME: allow app to specify what SAS methods can be used - if (content.short_authentication_string.includes("decimal")) { - sasMethods.push("decimal"); - } - if (content.short_authentication_string.includes("emoji")) { - sasMethods.push("emoji"); - } try { const commitmentStr = olmSAS.get_pubkey() + anotherjson.stringify(content); this._sendToDevice("m.key.verification.accept", { - key_agreement_protocol: "curve25519", - hash: "sha256", - message_authentication_code: "hmac-sha256", + key_agreement_protocol: keyAgreement, + hash: hashMethod, + message_authentication_code: macMethod, short_authentication_string: sasMethods, + // TODO: use selected hash function (when we support multiple) commitment: olmutil.sha256(commitmentStr), }); @@ -302,7 +327,7 @@ export default class SAS extends Base { this.emit("show_sas", { sas: generateSas(sasBytes, sasMethods), confirm: () => { - this._sendMAC(olmSAS); + this._sendMAC(olmSAS, macMethod); resolve(); }, cancel: () => reject(newUserCancelledError()), @@ -316,13 +341,13 @@ export default class SAS extends Base { verifySAS, ]); content = e.getContent(); - await this._checkMAC(olmSAS, content); + await this._checkMAC(olmSAS, content, macMethod); } finally { olmSAS.free(); } } - _sendMAC(olmSAS) { + _sendMAC(olmSAS, method) { const keyId = `ed25519:${this._baseApis.deviceId}`; const mac = {}; const baseInfo = "MATRIX_KEY_VERIFICATION_MAC" @@ -330,24 +355,24 @@ export default class SAS extends Base { + this.userId + this.deviceId + this.transactionId; - mac[keyId] = olmSAS.calculate_mac( + mac[keyId] = olmSAS[macMethods[method]]( this._baseApis.getDeviceEd25519Key(), baseInfo + keyId, ); - const keys = olmSAS.calculate_mac( + const keys = olmSAS[macMethods[method]]( keyId, baseInfo + "KEY_IDS", ); this._sendToDevice("m.key.verification.mac", { mac, keys }); } - async _checkMAC(olmSAS, content) { + async _checkMAC(olmSAS, content, method) { const baseInfo = "MATRIX_KEY_VERIFICATION_MAC" + this.userId + this.deviceId + this._baseApis.getUserId() + this._baseApis.deviceId + this.transactionId; - if (content.keys !== olmSAS.calculate_mac( + if (content.keys !== olmSAS[macMethods[method]]( Object.keys(content.mac).sort().join(","), baseInfo + "KEY_IDS", )) { @@ -355,7 +380,7 @@ export default class SAS extends Base { } await this._verifyKeys(this.userId, content.mac, (keyId, device, keyInfo) => { - if (keyInfo !== olmSAS.calculate_mac( + if (keyInfo !== olmSAS[macMethods[method]]( device.keys[keyId], baseInfo + keyId, )) { diff --git a/yarn.lock b/yarn.lock index 310dc6081..50e2c6d15 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3520,9 +3520,9 @@ object.pick@^1.3.0: dependencies: isobject "^3.0.1" -"olm@https://matrix.org/packages/npm/olm/olm-3.1.0-pre1.tgz": - version "3.1.0-pre1" - resolved "https://matrix.org/packages/npm/olm/olm-3.1.0-pre1.tgz#54f14fa901ff5c81db516b3b2adb294b91726eaa" +"olm@https://matrix.org/packages/npm/olm/olm-3.1.0-pre3.tgz": + version "3.1.0-pre3" + resolved "https://matrix.org/packages/npm/olm/olm-3.1.0-pre3.tgz#525aa8191b4b6fcb07a3aa6815687780b99be411" once@1.x, once@^1.3.0: version "1.4.0" From 
6ba7e85e2423fcfc11ea932d170edf554da2e149 Mon Sep 17 00:00:00 2001
From: "J. Ryan Stinnett"
Date: Wed, 3 Apr 2019 16:45:41 +0100
Subject: [PATCH 006/159] Ensure we have crypto before accessing it in `Room` model

If crypto startup has failed, we shouldn't try to access any of its
methods. This fixes a variant of this in the `Room` model.
---
 spec/unit/room.spec.js | 3 +++
 src/models/room.js | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/spec/unit/room.spec.js b/spec/unit/room.spec.js
index 4a8ea7e59..1b1d6fb29 100644
--- a/spec/unit/room.spec.js
+++ b/spec/unit/room.spec.js
@@ -1318,6 +1318,9 @@ describe("Room", function() {
                 // events should already be MatrixEvents
                 return function(event) {return event;};
             },
+            isCryptoEnabled() {
+                return true;
+            },
             isRoomEncrypted: function() {
                 return false;
             },
diff --git a/src/models/room.js b/src/models/room.js
index 2696aa6e3..276b9e5cc 100644
--- a/src/models/room.js
+++ b/src/models/room.js
@@ -496,7 +496,7 @@ Room.prototype.loadMembersIfNeeded = function() {
         const inMemoryUpdate = this._loadMembers().then((result) => {
             this.currentState.setOutOfBandMembers(result.memberEvents);
             // now the members are loaded, start to track the e2e devices if needed
-            if (this._client.isRoomEncrypted(this.roomId)) {
+            if (this._client.isCryptoEnabled() && this._client.isRoomEncrypted(this.roomId)) {
                 this._client._crypto.trackRoomDevices(this.roomId);
             }
             return result.fromServer;

From dd007354099ea58a8f3c96b50c6366ad61f5b7aa Mon Sep 17 00:00:00 2001
From: "J. Ryan Stinnett"
Date: Thu, 4 Apr 2019 10:17:44 +0100
Subject: [PATCH 007/159] Degrade `IndexedDBStore` back to memory only on failure

IndexedDB may fail at any moment with `QuotaExceededError` when space is
low or other random issues we can't control. Since `IndexedDBStore` is
just a cache for improving performance, we can give up on it if it fails.

This causes `IndexedDBStore` to degrade in place back to using memory
only. This allows (for example) login to complete even if IndexedDB is
exploding.

Hopefully improves https://github.com/vector-im/riot-web/issues/7769
---
 src/store/indexeddb.js | 90 ++++++++++++++++++++++++++++++------------
 1 file changed, 65 insertions(+), 25 deletions(-)

diff --git a/src/store/indexeddb.js b/src/store/indexeddb.js
index 7842af5f3..5be7d251f 100644
--- a/src/store/indexeddb.js
+++ b/src/store/indexeddb.js
@@ -15,6 +15,8 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 
+/* eslint-disable no-invalid-this */
+
 import Promise from 'bluebird';
 import {MemoryStore} from "./memory";
 import utils from "../utils";
@@ -146,28 +148,28 @@ IndexedDBStore.prototype.startup = function() {
  * client state to where it was at the last save, or null if there
  * is no saved sync data.
  */
-IndexedDBStore.prototype.getSavedSync = function() {
+IndexedDBStore.prototype.getSavedSync = degradable(function() {
     return this.backend.getSavedSync();
-};
+}, "getSavedSync");
 
 /** @return {Promise} whether or not the database was newly created in this session. */
-IndexedDBStore.prototype.isNewlyCreated = function() {
+IndexedDBStore.prototype.isNewlyCreated = degradable(function() {
     return this.backend.isNewlyCreated();
-};
+}, "isNewlyCreated");
 
 /**
  * @return {Promise} If there is a saved sync, the nextBatch token
  * for this sync, otherwise null.
*/ -IndexedDBStore.prototype.getSavedSyncToken = function() { +IndexedDBStore.prototype.getSavedSyncToken = degradable(function() { return this.backend.getNextBatchToken(); -}, +}, "getSavedSyncToken"), /** * Delete all data from this store. * @return {Promise} Resolves if the data was deleted from the database. */ -IndexedDBStore.prototype.deleteAllData = function() { +IndexedDBStore.prototype.deleteAllData = degradable(function() { MemoryStore.prototype.deleteAllData.call(this); return this.backend.clearDatabase().then(() => { console.log("Deleted indexeddb data."); @@ -175,7 +177,7 @@ IndexedDBStore.prototype.deleteAllData = function() { console.error(`Failed to delete indexeddb data: ${err}`); throw err; }); -}; +}); /** * Whether this store would like to save its data @@ -203,7 +205,7 @@ IndexedDBStore.prototype.save = function() { return Promise.resolve(); }; -IndexedDBStore.prototype._reallySave = function() { +IndexedDBStore.prototype._reallySave = degradable(function() { this._syncTs = Date.now(); // set now to guard against multi-writes // work out changed users (this doesn't handle deletions but you @@ -219,14 +221,12 @@ IndexedDBStore.prototype._reallySave = function() { this._userModifiedMap[u.userId] = u.getLastModifiedTime(); } - return this.backend.syncToDatabase(userTuples).catch((err) => { - console.error("sync fail:", err); - }); -}; + return this.backend.syncToDatabase(userTuples); +}); -IndexedDBStore.prototype.setSyncData = function(syncData) { +IndexedDBStore.prototype.setSyncData = degradable(function(syncData) { return this.backend.setSyncData(syncData); -}; +}, "setSyncData"); /** * Returns the out-of-band membership events for this room that @@ -235,9 +235,9 @@ IndexedDBStore.prototype.setSyncData = function(syncData) { * @returns {event[]} the events, potentially an empty array if OOB loading didn't yield any new members * @returns {null} in case the members for this room haven't been stored yet */ -IndexedDBStore.prototype.getOutOfBandMembers = function(roomId) { +IndexedDBStore.prototype.getOutOfBandMembers = degradable(function(roomId) { return this.backend.getOutOfBandMembers(roomId); -}; +}, "getOutOfBandMembers"); /** * Stores the out-of-band membership events for this room. 
Note that @@ -247,20 +247,60 @@ IndexedDBStore.prototype.getOutOfBandMembers = function(roomId) { * @param {event[]} membershipEvents the membership events to store * @returns {Promise} when all members have been stored */ -IndexedDBStore.prototype.setOutOfBandMembers = function(roomId, membershipEvents) { +IndexedDBStore.prototype.setOutOfBandMembers = degradable(function( + roomId, + membershipEvents, +) { return this.backend.setOutOfBandMembers(roomId, membershipEvents); -}; +}, "setOutOfBandMembers"); -IndexedDBStore.prototype.clearOutOfBandMembers = function(roomId) { +IndexedDBStore.prototype.clearOutOfBandMembers = degradable(function(roomId) { return this.backend.clearOutOfBandMembers(roomId); -}; +}, "clearOutOfBandMembers"); -IndexedDBStore.prototype.getClientOptions = function() { +IndexedDBStore.prototype.getClientOptions = degradable(function() { return this.backend.getClientOptions(); -}; +}, "getClientOptions"); -IndexedDBStore.prototype.storeClientOptions = function(options) { +IndexedDBStore.prototype.storeClientOptions = degradable(function(options) { return this.backend.storeClientOptions(options); -}; +}, "storeClientOptions"); module.exports.IndexedDBStore = IndexedDBStore; + +/** + * All member functions of `IndexedDBStore` that access the backend use this wrapper to + * watch for failures after initial store startup, including `QuotaExceededError` as + * free disk space changes, etc. + * + * When IndexedDB fails via any of these paths, we degrade this back to a `MemoryStore` + * in place so that the current operation and all future ones are in-memory only. + * + * @param {Function} func The degradable work to do. + * @param {String} fallback The method name for fallback. + * @returns {Function} A wrapped member function. + */ +function degradable(func, fallback) { + return async function(...args) { + try { + return await func.call(this, ...args); + } catch (e) { + console.error("IndexedDBStore failure, degrading to MemoryStore", e); + try { + // We try to delete IndexedDB after degrading since this store is only a + // cache (the app will still function correctly without the data). + // It's possible that deleting repair IndexedDB for the next app load, + // potenially by making a little more space available. + console.log("IndexedDBStore trying to delete degraded data"); + await this.backend.clearDatabase(); + console.log("IndexedDBStore delete after degrading succeeeded"); + } catch (e) { + console.warn("IndexedDBStore delete after degrading failed", e); + } + Object.setPrototypeOf(this, MemoryStore.prototype); + if (fallback) { + return await MemoryStore.prototype[fallback].call(this, ...args); + } + } + }; +} From 8a2f84b67841786a4a5f0d3d165bd0a49753b2d1 Mon Sep 17 00:00:00 2001 From: "J. Ryan Stinnett" Date: Thu, 4 Apr 2019 11:36:34 +0100 Subject: [PATCH 008/159] Emit event when `IndexedDBStore` degrades This allows for optional tracking of when the store degrades to see how often it happens in the field. --- src/store/indexeddb.js | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/store/indexeddb.js b/src/store/indexeddb.js index 5be7d251f..3789159aa 100644 --- a/src/store/indexeddb.js +++ b/src/store/indexeddb.js @@ -20,6 +20,7 @@ limitations under the License. 
import Promise from 'bluebird'; import {MemoryStore} from "./memory"; import utils from "../utils"; +import {EventEmitter} from 'events'; import LocalIndexedDBStoreBackend from "./indexeddb-local-backend.js"; import RemoteIndexedDBStoreBackend from "./indexeddb-remote-backend.js"; import User from "../models/user"; @@ -112,6 +113,7 @@ const IndexedDBStore = function IndexedDBStore(opts) { }; }; utils.inherits(IndexedDBStore, MemoryStore); +utils.extend(IndexedDBStore.prototype, EventEmitter.prototype); IndexedDBStore.exists = function(indexedDB, dbName) { return LocalIndexedDBStoreBackend.exists(indexedDB, dbName); @@ -286,6 +288,7 @@ function degradable(func, fallback) { return await func.call(this, ...args); } catch (e) { console.error("IndexedDBStore failure, degrading to MemoryStore", e); + this.emit("degraded", e); try { // We try to delete IndexedDB after degrading since this store is only a // cache (the app will still function correctly without the data). From 3eb0c534a516ff411667639724721caab6a3a0c4 Mon Sep 17 00:00:00 2001 From: "J. Ryan Stinnett" Date: Thu, 4 Apr 2019 15:54:26 +0100 Subject: [PATCH 009/159] Clarify why it's safe to change store types on demand --- src/store/indexeddb.js | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/store/indexeddb.js b/src/store/indexeddb.js index 3789159aa..a7cbe2086 100644 --- a/src/store/indexeddb.js +++ b/src/store/indexeddb.js @@ -300,6 +300,13 @@ function degradable(func, fallback) { } catch (e) { console.warn("IndexedDBStore delete after degrading failed", e); } + // Degrade the store from being an instance of `IndexedDBStore` to instead be + // an instance of `MemoryStore` so that future API calls use the memory path + // directly and skip IndexedDB entirely. This should be safe as + // `IndexedDBStore` already extends from `MemoryStore`, so we are making the + // store become its parent type in a way. The mutator methods of + // `IndexedDBStore` also maintain the state that `MemoryStore` uses (many are + // not overridden at all). Object.setPrototypeOf(this, MemoryStore.prototype); if (fallback) { return await MemoryStore.prototype[fallback].call(this, ...args); From 389fcfaf3dade4c5411ba44375b07c116765a5fd Mon Sep 17 00:00:00 2001 From: "J. Ryan Stinnett" Date: Thu, 4 Apr 2019 16:29:43 +0100 Subject: [PATCH 010/159] Ensure IDB store maintains all memory state A few of the IDB store methods weren't updating memory store state, so let's improve those so we can reliably fall back to it from IDB at any time. 
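The shape of the change is a simple write-through: update the in-memory
parent store first, then delegate to the IndexedDB backend, so that a later
degrade to `MemoryStore` sees exactly the same state. A condensed sketch of
the pattern, mirroring the diff below rather than adding any new API:

    IndexedDBStore.prototype.storeClientOptions = degradable(function(options) {
        // Keep MemoryStore's copy current so a fallback has identical state.
        MemoryStore.prototype.storeClientOptions.call(this, options);
        // Then persist through the IndexedDB backend as before.
        return this.backend.storeClientOptions(options);
    }, "storeClientOptions");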
--- src/store/indexeddb.js | 3 +++ src/store/memory.js | 7 +++++++ 2 files changed, 10 insertions(+) diff --git a/src/store/indexeddb.js b/src/store/indexeddb.js index a7cbe2086..35a862955 100644 --- a/src/store/indexeddb.js +++ b/src/store/indexeddb.js @@ -253,10 +253,12 @@ IndexedDBStore.prototype.setOutOfBandMembers = degradable(function( roomId, membershipEvents, ) { + MemoryStore.prototype.setOutOfBandMembers.call(this, roomId, membershipEvents); return this.backend.setOutOfBandMembers(roomId, membershipEvents); }, "setOutOfBandMembers"); IndexedDBStore.prototype.clearOutOfBandMembers = degradable(function(roomId) { + MemoryStore.prototype.clearOutOfBandMembers.call(this); return this.backend.clearOutOfBandMembers(roomId); }, "clearOutOfBandMembers"); @@ -265,6 +267,7 @@ IndexedDBStore.prototype.getClientOptions = degradable(function() { }, "getClientOptions"); IndexedDBStore.prototype.storeClientOptions = degradable(function(options) { + MemoryStore.prototype.storeClientOptions.call(this, options); return this.backend.storeClientOptions(options); }, "storeClientOptions"); diff --git a/src/store/memory.js b/src/store/memory.js index d9f65e4c6..df74f9860 100644 --- a/src/store/memory.js +++ b/src/store/memory.js @@ -385,6 +385,7 @@ module.exports.MemoryStore.prototype = { }; return Promise.resolve(); }, + /** * Returns the out-of-band membership events for this room that * were previously loaded. @@ -395,6 +396,7 @@ module.exports.MemoryStore.prototype = { getOutOfBandMembers: function(roomId) { return Promise.resolve(this._oobMembers[roomId] || null); }, + /** * Stores the out-of-band membership events for this room. Note that * it still makes sense to store an empty array as the OOB status for the room is @@ -408,6 +410,11 @@ module.exports.MemoryStore.prototype = { return Promise.resolve(); }, + clearOutOfBandMembers: function() { + this._oobMembers = {}; + return Promise.resolve(); + }, + getClientOptions: function() { return Promise.resolve(this._clientOptions); }, From 751060305c3529ce42d8ab910fee28f17f52d8cf Mon Sep 17 00:00:00 2001 From: Hubert Chathi Date: Thu, 4 Apr 2019 14:07:16 -0400 Subject: [PATCH 011/159] update the name of the MAC method --- src/crypto/verification/SAS.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/crypto/verification/SAS.js b/src/crypto/verification/SAS.js index e17729b04..8d8858ca6 100644 --- a/src/crypto/verification/SAS.js +++ b/src/crypto/verification/SAS.js @@ -159,7 +159,7 @@ function generateSas(sasBytes, methods) { } const macMethods = { - "hmac-sha256-bits": "calculate_mac", + "hkdf-hmac-sha256": "calculate_mac", "hmac-sha256": "calculate_mac_long_kdf", }; @@ -169,7 +169,7 @@ const macMethods = { */ const KEY_AGREEMENT_LIST = ["curve25519"]; const HASHES_LIST = ["sha256"]; -const MAC_LIST = ["hmac-sha256-bits", "hmac-sha256"]; +const MAC_LIST = ["hkdf-hmac-sha256", "hmac-sha256"]; const SAS_LIST = Object.keys(sasGenerators); const KEY_AGREEMENT_SET = new Set(KEY_AGREEMENT_LIST); From 01af303d6391531b2aa71498376b52c9e490810c Mon Sep 17 00:00:00 2001 From: Hubert Chathi Date: Thu, 4 Apr 2019 14:08:30 -0400 Subject: [PATCH 012/159] fix the selection of the verification methods, and test more things --- spec/unit/crypto/verification/sas.spec.js | 27 +++++++++++++++++++++++ src/crypto/verification/SAS.js | 12 +++++++--- 2 files changed, 36 insertions(+), 3 deletions(-) diff --git a/spec/unit/crypto/verification/sas.spec.js b/spec/unit/crypto/verification/sas.spec.js index 902d48083..660cfa666 100644 --- 
a/spec/unit/crypto/verification/sas.spec.js +++ b/spec/unit/crypto/verification/sas.spec.js @@ -160,10 +160,25 @@ describe("SAS verification", function() { }); it("should verify a key", async function() { + let macMethod; + const origSendToDevice = alice.sendToDevice; + bob.sendToDevice = function(type, map) { + if (type === "m.key.verification.accept") { + macMethod = map[alice.getUserId()][alice.deviceId] + .message_authentication_code; + } + return origSendToDevice.call(this, type, map); + }; + await Promise.all([ aliceVerifier.verify(), bobPromise.then((verifier) => verifier.verify()), ]); + + // make sure that it uses the preferred method + expect(macMethod).toBe("hkdf-hmac-sha256"); + + // make sure Alice and Bob verified each other expect(alice.setDeviceVerified) .toHaveBeenCalledWith(bob.getUserId(), bob.deviceId); expect(bob.setDeviceVerified) @@ -173,6 +188,7 @@ describe("SAS verification", function() { it("should be able to verify using the old MAC", async function() { // pretend that Alice can only understand the old (incorrect) MAC, // and make sure that she can still verify with Bob + let macMethod; const origSendToDevice = alice.sendToDevice; alice.sendToDevice = function(type, map) { if (type === "m.key.verification.start") { @@ -186,10 +202,21 @@ describe("SAS verification", function() { } return origSendToDevice.call(this, type, map); }; + bob.sendToDevice = function(type, map) { + if (type === "m.key.verification.accept") { + macMethod = map[alice.getUserId()][alice.deviceId] + .message_authentication_code; + } + return origSendToDevice.call(this, type, map); + }; + await Promise.all([ aliceVerifier.verify(), bobPromise.then((verifier) => verifier.verify()), ]); + + expect(macMethod).toBe("hmac-sha256"); + expect(alice.setDeviceVerified) .toHaveBeenCalledWith(bob.getUserId(), bob.deviceId); expect(bob.setDeviceVerified) diff --git a/src/crypto/verification/SAS.js b/src/crypto/verification/SAS.js index 8d8858ca6..5889c56be 100644 --- a/src/crypto/verification/SAS.js +++ b/src/crypto/verification/SAS.js @@ -281,12 +281,18 @@ export default class SAS extends Base { async _doRespondVerification() { let content = this.startEvent.getContent(); + // Note: we intersect using our pre-made lists, rather than the sets, + // so that the result will be in our order of preference. Then + // fetching the first element from the array will give our preferred + // method out of the ones offered by the other party. 
const keyAgreement - = intersection(content.key_agreement_protocols, KEY_AGREEMENT_SET)[0]; + = intersection( + KEY_AGREEMENT_LIST, new Set(content.key_agreement_protocols), + )[0]; const hashMethod - = intersection(content.hashes, HASHES_SET)[0]; + = intersection(HASHES_LIST, new Set(content.hashes))[0]; const macMethod - = intersection(content.message_authentication_codes, MAC_SET)[0]; + = intersection(MAC_LIST, new Set(content.message_authentication_codes))[0]; // FIXME: allow app to specify what SAS methods can be used const sasMethods = intersection(content.short_authentication_string, SAS_SET); From 0945e2c5c6cc1f22fd8bcc51f73fd440c6be802d Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Fri, 5 Apr 2019 11:36:01 -0600 Subject: [PATCH 013/159] Refuse to set forwards pagination token on live timeline Should fix the error seen in https://github.com/matrix-org/riot-web-rageshakes/issues/1389 (https://github.com/vector-im/riot-web/issues/8593) --- src/models/event-timeline-set.js | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/models/event-timeline-set.js b/src/models/event-timeline-set.js index 9675f7300..6025b7d77 100644 --- a/src/models/event-timeline-set.js +++ b/src/models/event-timeline-set.js @@ -446,6 +446,14 @@ EventTimelineSet.prototype.addEventsToTimeline = function(events, toStartOfTimel // new information, we update the pagination token for whatever // timeline we ended up on. if (lastEventWasNew || !didUpdate) { + if (direction === EventTimeline.FORWARDS && timeline === this._liveTimeline) { + console.warn({lastEventWasNew, didUpdate}); // for debugging + console.warn( + `Refusing to set forwards pagination token of live timeline ` + + `${timeline} to ${paginationToken}`, + ); + return; + } timeline.setPaginationToken(paginationToken, direction); } }; From 1d6f7f862f1cf11f5e332fbeaa3d157dd3fe57e5 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Fri, 5 Apr 2019 14:49:20 -0600 Subject: [PATCH 014/159] Track e2e highlights better, particularly in 'Mentions Only' rooms Fixes https://github.com/vector-im/riot-web/issues/9280 The server is unable to calculate encrypted highlights for us, so we calculate them. This also means the server always sends a zero for highlight_count, and therefore in sync.js we now trust our judgement over the server's. In future, this check will need to be altered to support server-side encrypted notifications if that happens. This fixes the part of 9280 where the badge count ends up disappearing unless the message received also happens to be a mention. The changes in client.js are more to support rooms which are mentions only. Because the server doesn't send an unread_count for these rooms, the total notifications will always be zero. Therefore, we try and calculate that. In order to do that, we need to assume that our highlight count is also wrong and calculate it appropriately. 
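Concretely, the client-side adjustment looks roughly like this (a simplified
sketch of the change below, using the same names as the diff):

    // Recompute our own highlight count for encrypted events the server
    // can't see into, and never let the total badge fall below it.
    const currentCount = room.getUnreadNotificationCount("highlight");
    let newCount = currentCount;
    if (newHighlight && !oldHighlight) newCount++;
    if (!newHighlight && oldHighlight) newCount--;
    room.setUnreadNotificationCount("highlight", newCount);
    if (room.getUnreadNotificationCount("total") < newCount) {
        room.setUnreadNotificationCount("total", newCount);
    }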
--- src/client.js | 23 ++++++++++++++++++----- src/sync.js | 14 +++++++++++--- 2 files changed, 29 insertions(+), 8 deletions(-) diff --git a/src/client.js b/src/client.js index ea6e32e86..e8b8c1cf8 100644 --- a/src/client.js +++ b/src/client.js @@ -242,19 +242,32 @@ function MatrixClient(opts) { const actions = this._pushProcessor.actionsForEvent(event); event.setPushActions(actions); // Might as well while we're here + const room = this.getRoom(event.getRoomId()); + if (!room) return; + + const currentCount = room.getUnreadNotificationCount("highlight"); + // Ensure the unread counts are kept up to date if the event is encrypted + // We also want to make sure that the notification count goes up if we already + // have encrypted events to avoid other code from resetting 'highlight' to zero. const oldHighlight = oldActions && oldActions.tweaks ? !!oldActions.tweaks.highlight : false; const newHighlight = actions && actions.tweaks ? !!actions.tweaks.highlight : false; - if (oldHighlight !== newHighlight) { - const room = this.getRoom(event.getRoomId()); + if (oldHighlight !== newHighlight || currentCount > 0) { // TODO: Handle mentions received while the client is offline // See also https://github.com/vector-im/riot-web/issues/9069 - if (room && !room.hasUserReadEvent(this.getUserId(), event.getId())) { - const current = room.getUnreadNotificationCount("highlight"); - const newCount = newHighlight ? current + 1 : current - 1; + if (!room.hasUserReadEvent(this.getUserId(), event.getId())) { + let newCount = currentCount; + if (newHighlight && !oldHighlight) newCount++; + if (!newHighlight && oldHighlight) newCount--; room.setUnreadNotificationCount("highlight", newCount); + + // Fix 'Mentions Only' rooms from not having the right badge count + const totalCount = room.getUnreadNotificationCount('total'); + if (totalCount < newCount) { + room.setUnreadNotificationCount('total', newCount); + } } } }); diff --git a/src/sync.js b/src/sync.js index 337f4100c..f024314e5 100644 --- a/src/sync.js +++ b/src/sync.js @@ -1076,9 +1076,17 @@ SyncApi.prototype._processSyncResponse = async function( room.setUnreadNotificationCount( 'total', joinObj.unread_notifications.notification_count, ); - room.setUnreadNotificationCount( - 'highlight', joinObj.unread_notifications.highlight_count, - ); + + // We track unread notifications ourselves in encrypted rooms, so don't + // bother setting it here. We trust our calculations better than the + // server's for this case, and therefore will assume that our non-zero + // count is accurate. + if (client.isRoomEncrypted(room.roomId) + && room.getUnreadNotificationCount('highlight') <= 0) { + room.setUnreadNotificationCount( + 'highlight', joinObj.unread_notifications.highlight_count, + ); + } } room.updateMyMembership("join"); From c495b12cefd24a1320d71e86b015325333517994 Mon Sep 17 00:00:00 2001 From: jkasun Date: Sun, 7 Apr 2019 00:19:02 +0530 Subject: [PATCH 015/159] change event redact, POST request to PUT request --- src/base-apis.js | 33 ++++++++++++++++++++++++++++++--- 1 file changed, 30 insertions(+), 3 deletions(-) diff --git a/src/base-apis.js b/src/base-apis.js index 4e9fed77b..6ea62c0b2 100644 --- a/src/base-apis.js +++ b/src/base-apis.js @@ -1,3 +1,23 @@ +/** + * @param {string} roomId + * @param {string} eventId + * @param {string=} txnId transaction id. One will be made up if not + * supplied. + * @param {module:client.callback} callback Optional. 
+ * @return {module:client.Promise} Resolves: TODO + * @return {module:http-api.MatrixError} Rejects: with an error response. + */ +MatrixBaseApis.prototype.redactEvent = function( + roomId, eventId, txnId, callback, +) { + const path = utils.encodeUri("/rooms/$roomId/redact/$eventId/$tnxId", { + $roomId: roomId, + $eventId: eventId, + $txnId: txnId ? txnId : this.makeTxnId(), + }); + + return this._http.authedRequest(callback, "PUT", path, undefined, {}); +}; /* Copyright 2015, 2016 OpenMarket Ltd Copyright 2017 Vector Creations Ltd @@ -891,18 +911,25 @@ MatrixBaseApis.prototype.sendStateEvent = function(roomId, eventType, content, s /** * @param {string} roomId * @param {string} eventId + * @param {string=} txnId transaction id. One will be made up if not + * supplied. * @param {module:client.callback} callback Optional. * @return {module:client.Promise} Resolves: TODO * @return {module:http-api.MatrixError} Rejects: with an error response. */ -MatrixBaseApis.prototype.redactEvent = function(roomId, eventId, callback) { - const path = utils.encodeUri("/rooms/$roomId/redact/$eventId", { +MatrixBaseApis.prototype.redactEvent = function( + roomId, eventId, txnId, callback, +) { + const path = utils.encodeUri("/rooms/$roomId/redact/$eventId/$tnxId", { $roomId: roomId, $eventId: eventId, + $txnId: txnId ? txnId : this.makeTxnId(), }); - return this._http.authedRequest(callback, "POST", path, undefined, {}); + + return this._http.authedRequest(callback, "PUT", path, undefined, {}); }; + /** * @param {string} roomId * @param {Number} limit From 8a56a5f1ed6af60c759a31d6470eda24fa449e3e Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Thu, 28 Mar 2019 18:29:38 -0600 Subject: [PATCH 016/159] Refuse splicing the live timeline into a broken position Credit to Matthew for basically solving this. Theoretically fixes spontaneous timeline corruption: https://github.com/vector-im/riot-web/issues/8593 When the live timeline ends up in a position where it can no longer be live (such as becoming the second timeline in the set, rather than the first) we end up getting neighbouring timeline errors. By refusing to splice the live timeline into such a position, we hopefully keep the live timeline in a position of still being live for when it is next used. The running theory that leads to this fix is multiple limited syncs coming in, causing holes in the timeline. When trying to patch up the holes, the timeline set would end up splicing all over the place, leading to potentially splicing the live timeline into a broken position. --- src/models/event-timeline-set.js | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/src/models/event-timeline-set.js b/src/models/event-timeline-set.js index 65e15dfc8..c3d006468 100644 --- a/src/models/event-timeline-set.js +++ b/src/models/event-timeline-set.js @@ -408,8 +408,21 @@ EventTimelineSet.prototype.addEventsToTimeline = function(events, toStartOfTimel console.info("Already have timeline for " + eventId + " - joining timeline " + timeline + " to " + existingTimeline); - timeline.setNeighbouringTimeline(existingTimeline, direction); - existingTimeline.setNeighbouringTimeline(timeline, inverseDirection); + + if (direction === EventTimeline.BACKWARDS && existingTimeline === this._liveTimeline) { + // The live timeline should never be spliced into a non-live position. 
+ console.warn("Refusing to splice live timeline as a backwards timeline"); + } else { + timeline.setNeighbouringTimeline(existingTimeline, direction); + } + + if (inverseDirection === EventTimeline.BACKWARDS && timeline === this._liveTimeline) { + // The live timeline should never be spliced into a non-live position. + console.warn("Refusing to splice live timeline as a forwards timeline"); + } else { + existingTimeline.setNeighbouringTimeline(timeline, inverseDirection); + } + timeline = existingTimeline; didUpdate = true; } From a54845bf76d58b2aa935389ca12be3e82830e79f Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Thu, 28 Mar 2019 18:34:57 -0600 Subject: [PATCH 017/159] Appease the linter --- src/models/event-timeline-set.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/models/event-timeline-set.js b/src/models/event-timeline-set.js index c3d006468..b8ea153ab 100644 --- a/src/models/event-timeline-set.js +++ b/src/models/event-timeline-set.js @@ -409,14 +409,18 @@ EventTimelineSet.prototype.addEventsToTimeline = function(events, toStartOfTimel " - joining timeline " + timeline + " to " + existingTimeline); - if (direction === EventTimeline.BACKWARDS && existingTimeline === this._liveTimeline) { + // Variables to keep the line length limited below. + const existingIsLive = existingTimeline === this._liveTimeline; + const timelineIsLive = timeline === this._liveTimeline; + + if (direction === EventTimeline.BACKWARDS && existingIsLive) { // The live timeline should never be spliced into a non-live position. console.warn("Refusing to splice live timeline as a backwards timeline"); } else { timeline.setNeighbouringTimeline(existingTimeline, direction); } - if (inverseDirection === EventTimeline.BACKWARDS && timeline === this._liveTimeline) { + if (inverseDirection === EventTimeline.BACKWARDS && timelineIsLive) { // The live timeline should never be spliced into a non-live position. console.warn("Refusing to splice live timeline as a forwards timeline"); } else { From 91aa783c3d4b2fdcdde4fcd9ea3d361696a3a322 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Thu, 28 Mar 2019 18:59:40 -0600 Subject: [PATCH 018/159] Use better words for warnings --- src/models/event-timeline-set.js | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/models/event-timeline-set.js b/src/models/event-timeline-set.js index b8ea153ab..501df8ece 100644 --- a/src/models/event-timeline-set.js +++ b/src/models/event-timeline-set.js @@ -415,14 +415,20 @@ EventTimelineSet.prototype.addEventsToTimeline = function(events, toStartOfTimel if (direction === EventTimeline.BACKWARDS && existingIsLive) { // The live timeline should never be spliced into a non-live position. - console.warn("Refusing to splice live timeline as a backwards timeline"); + console.warn( + "Refusing to set a preceding existingTimeLine on our " + + "timeline as the existingTimeLine is live", + ); } else { timeline.setNeighbouringTimeline(existingTimeline, direction); } if (inverseDirection === EventTimeline.BACKWARDS && timelineIsLive) { // The live timeline should never be spliced into a non-live position. 
- console.warn("Refusing to splice live timeline as a forwards timeline"); + console.warn( + "Refusing to set our preceding timeline on a existingTimeLine " + + "as our timeline is live", + ); } else { existingTimeline.setNeighbouringTimeline(timeline, inverseDirection); } From d153c4da075c17a754bc11e040936e818ab27681 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Thu, 28 Mar 2019 19:14:17 -0600 Subject: [PATCH 019/159] log the timeline that broke --- src/models/event-timeline-set.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/models/event-timeline-set.js b/src/models/event-timeline-set.js index 501df8ece..cd199be2e 100644 --- a/src/models/event-timeline-set.js +++ b/src/models/event-timeline-set.js @@ -417,7 +417,7 @@ EventTimelineSet.prototype.addEventsToTimeline = function(events, toStartOfTimel // The live timeline should never be spliced into a non-live position. console.warn( "Refusing to set a preceding existingTimeLine on our " + - "timeline as the existingTimeLine is live", + "timeline as the existingTimeLine is live (" + existingTimeline + ")", ); } else { timeline.setNeighbouringTimeline(existingTimeline, direction); @@ -427,7 +427,7 @@ EventTimelineSet.prototype.addEventsToTimeline = function(events, toStartOfTimel // The live timeline should never be spliced into a non-live position. console.warn( "Refusing to set our preceding timeline on a existingTimeLine " + - "as our timeline is live", + "as our timeline is live (" + timeline + ")", ); } else { existingTimeline.setNeighbouringTimeline(timeline, inverseDirection); From 0ab41215f06a03b8f483eeb8d6553d1086e88e6c Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 1 Apr 2019 11:11:35 -0600 Subject: [PATCH 020/159] Add a tiny bit of logging to work out what timelines are doing See https://github.com/vector-im/riot-web/issues/8593 --- src/models/event-timeline.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/models/event-timeline.js b/src/models/event-timeline.js index d23e483da..63846cebd 100644 --- a/src/models/event-timeline.js +++ b/src/models/event-timeline.js @@ -276,7 +276,7 @@ EventTimeline.prototype.getNeighbouringTimeline = function(direction) { EventTimeline.prototype.setNeighbouringTimeline = function(neighbour, direction) { if (this.getNeighbouringTimeline(direction)) { throw new Error("timeline already has a neighbouring timeline - " + - "cannot reset neighbour"); + "cannot reset neighbour (direction: " + direction + ")"); } if (direction == EventTimeline.BACKWARDS) { From 963e271bce31ef2693b9626007a59a5c63b7787e Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 1 Apr 2019 18:43:48 -0600 Subject: [PATCH 021/159] Refuse to link live timelines into the forwards/backwards position See https://github.com/vector-im/riot-web/issues/8593#issuecomment-478681816 Previously (https://github.com/matrix-org/matrix-js-sdk/pull/873) we allowed half-linking timelines to each other if they satisfy the conditions, however this appears to not be helping. Instead, it seems like the timelines are getting stuck in a position where one direction is spliced but the other is broken. To avoid this case, we'll just avoid splicing in both directions when one of the directions is invalid. 
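In other words, the splice is now all-or-nothing. A condensed sketch of the
guard applied in the diff below (simplified, not the exact code):

    const backwardsIsLive = direction === EventTimeline.BACKWARDS && existingIsLive;
    const forwardsIsLive = direction === EventTimeline.FORWARDS && timelineIsLive;
    if (backwardsIsLive || forwardsIsLive) {
        // Log and skip this event entirely rather than half-linking the
        // live timeline into a non-live position.
        continue;
    }
    timeline.setNeighbouringTimeline(existingTimeline, direction);
    existingTimeline.setNeighbouringTimeline(timeline, inverseDirection);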
--- src/models/event-timeline-set.js | 37 ++++++++++++++++++-------------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/src/models/event-timeline-set.js b/src/models/event-timeline-set.js index cd199be2e..9675f7300 100644 --- a/src/models/event-timeline-set.js +++ b/src/models/event-timeline-set.js @@ -413,25 +413,30 @@ EventTimelineSet.prototype.addEventsToTimeline = function(events, toStartOfTimel const existingIsLive = existingTimeline === this._liveTimeline; const timelineIsLive = timeline === this._liveTimeline; - if (direction === EventTimeline.BACKWARDS && existingIsLive) { + const backwardsIsLive = direction === EventTimeline.BACKWARDS && existingIsLive; + const forwardsIsLive = direction === EventTimeline.FORWARDS && timelineIsLive; + + if (backwardsIsLive || forwardsIsLive) { // The live timeline should never be spliced into a non-live position. - console.warn( - "Refusing to set a preceding existingTimeLine on our " + - "timeline as the existingTimeLine is live (" + existingTimeline + ")", - ); - } else { - timeline.setNeighbouringTimeline(existingTimeline, direction); + // We use independent logging to better discover the problem at a glance. + console.warn({backwardsIsLive, forwardsIsLive}); // debugging + if (backwardsIsLive) { + console.warn( + "Refusing to set a preceding existingTimeLine on our " + + "timeline as the existingTimeLine is live (" + existingTimeline + ")", + ); + } + if (forwardsIsLive) { + console.warn( + "Refusing to set our preceding timeline on a existingTimeLine " + + "as our timeline is live (" + timeline + ")", + ); + } + continue; // abort splicing - try next event } - if (inverseDirection === EventTimeline.BACKWARDS && timelineIsLive) { - // The live timeline should never be spliced into a non-live position. - console.warn( - "Refusing to set our preceding timeline on a existingTimeLine " + - "as our timeline is live (" + timeline + ")", - ); - } else { - existingTimeline.setNeighbouringTimeline(timeline, inverseDirection); - } + timeline.setNeighbouringTimeline(existingTimeline, direction); + existingTimeline.setNeighbouringTimeline(timeline, inverseDirection); timeline = existingTimeline; didUpdate = true; From 828c51467ffa6d3c9d713a326e3f46518403d1f1 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Fri, 5 Apr 2019 11:36:01 -0600 Subject: [PATCH 022/159] Refuse to set forwards pagination token on live timeline Should fix the error seen in https://github.com/matrix-org/riot-web-rageshakes/issues/1389 (https://github.com/vector-im/riot-web/issues/8593) --- src/models/event-timeline-set.js | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/models/event-timeline-set.js b/src/models/event-timeline-set.js index 9675f7300..6025b7d77 100644 --- a/src/models/event-timeline-set.js +++ b/src/models/event-timeline-set.js @@ -446,6 +446,14 @@ EventTimelineSet.prototype.addEventsToTimeline = function(events, toStartOfTimel // new information, we update the pagination token for whatever // timeline we ended up on. 
if (lastEventWasNew || !didUpdate) { + if (direction === EventTimeline.FORWARDS && timeline === this._liveTimeline) { + console.warn({lastEventWasNew, didUpdate}); // for debugging + console.warn( + `Refusing to set forwards pagination token of live timeline ` + + `${timeline} to ${paginationToken}`, + ); + return; + } timeline.setPaginationToken(paginationToken, direction); } }; From f79f2105fdf5c10548ccfe152d7f43c8441cd44d Mon Sep 17 00:00:00 2001 From: Bruno Windels Date: Mon, 8 Apr 2019 16:03:25 +0200 Subject: [PATCH 023/159] Prepare changelog for v1.0.4 --- CHANGELOG.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cbc93629e..e09a9281c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,9 @@ +Changes in [1.0.4](https://github.com/matrix-org/matrix-js-sdk/releases/tag/v1.0.4) (2019-04-08) +================================================================================================ +[Full Changelog](https://github.com/matrix-org/matrix-js-sdk/compare/v1.0.3...v1.0.4) + + * Hotfix: more logging and potential fixes for timeline corruption issue, see ticket https://github.com/vector-im/riot-web/issues/8593. + Changes in [1.0.3](https://github.com/matrix-org/matrix-js-sdk/releases/tag/v1.0.3) (2019-04-01) ================================================================================================ [Full Changelog](https://github.com/matrix-org/matrix-js-sdk/compare/v1.0.3-rc.1...v1.0.3) @@ -211,7 +217,7 @@ Changes in [0.14.0-rc.1](https://github.com/matrix-org/matrix-js-sdk/releases/ta BREAKING CHANGE ---------------- - + * js-sdk now uses Olm 3.0. Apps using Olm must update to 3.0 to continue using Olm with the js-sdk. The js-sdk will call Olm's init() method when the client is started. From 00f5ddc93c3e0eec8f531fd0ee27d3a33d930692 Mon Sep 17 00:00:00 2001 From: Bruno Windels Date: Mon, 8 Apr 2019 16:03:26 +0200 Subject: [PATCH 024/159] v1.0.4 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index db2ac4a67..98a1f5543 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "matrix-js-sdk", - "version": "1.0.3", + "version": "1.0.4", "description": "Matrix Client-Server SDK for Javascript", "main": "index.js", "scripts": { From 0cc9994b8b230f2ba1a2eb79809020f0a796844d Mon Sep 17 00:00:00 2001 From: "J. Ryan Stinnett" Date: Mon, 8 Apr 2019 15:55:52 +0100 Subject: [PATCH 025/159] Add logging to sync startup path In https://github.com/vector-im/riot-web/issues/7769, we're seeing sync startup fail to complete, but the actual error isn't being logged. Hopefully these extra debug logs will provide more insight into the failing step. 
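For illustration, a minimal sketch of the logging pattern applied to each startup step below. The debuglog helper and the stubbed step are assumptions made for the example, not the SDK's own implementations:

    // Minimal stand-in for the SDK's debuglog helper (assumption).
    const debuglog = (...args) => console.log('[sync]', ...args);

    // Wrap an async startup step so we log when it starts, when it finishes,
    // and - crucially - the actual error if it fails.
    async function loggedStep(name, step) {
        debuglog(`${name}...`);
        try {
            const result = await step();
            debuglog(`${name} done`);
            return result;
        } catch (err) {
            console.error(`${name} failed`, err);
            throw err;
        }
    }

    // Example usage with a stubbed "get push rules" step:
    loggedStep('Getting push rules', async () => ({ global: { override: [] } }))
        .then((rules) => debuglog('Got push rules', rules))
        .catch(() => {});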
--- src/sync.js | 28 ++++++++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/src/sync.js b/src/sync.js index f024314e5..d1e58e14d 100644 --- a/src/sync.js +++ b/src/sync.js @@ -472,13 +472,16 @@ SyncApi.prototype.sync = function() { async function getPushRules() { try { + debuglog("Getting push rules..."); const result = await client.getPushRules(); debuglog("Got push rules"); client.pushRules = result; } catch (err) { + console.error("Getting push rules failed", err); // wait for saved sync to complete before doing anything else, // otherwise the sync state will end up being incorrect + debuglog("Waiting for saved sync before retrying push rules..."); await self.recoverFromSyncStartupError(savedSyncPromise, err); getPushRules(); return; @@ -487,22 +490,27 @@ SyncApi.prototype.sync = function() { } const checkLazyLoadStatus = async () => { + debuglog("Checking lazy load status..."); if (this.opts.lazyLoadMembers && client.isGuest()) { this.opts.lazyLoadMembers = false; } if (this.opts.lazyLoadMembers) { + debuglog("Checking server lazy load support..."); const supported = await client.doesServerSupportLazyLoading(); if (supported) { + debuglog("Creating and storing lazy load sync filter..."); this.opts.filter = await client.createFilter( Filter.LAZY_LOADING_SYNC_FILTER, ); + debuglog("Created and stored lazy load sync filter"); } else { - console.log("LL: lazy loading requested but not supported " + + debuglog("LL: lazy loading requested but not supported " + "by server, so disabling"); this.opts.lazyLoadMembers = false; } } // need to vape the store when enabling LL and wasn't enabled before + debuglog("Checking whether lazy loading has changed in store..."); const shouldClear = await this._wasLazyLoadingToggled(this.opts.lazyLoadMembers); if (shouldClear) { this._storeIsInvalid = true; @@ -519,12 +527,15 @@ SyncApi.prototype.sync = function() { if (this.opts.lazyLoadMembers && this.opts.crypto) { this.opts.crypto.enableLazyLoading(); } + debuglog("Storing client options..."); await this.client._storeClientOptions(); + debuglog("Stored client options"); getFilter(); // Now get the filter and start syncing }; async function getFilter() { + debuglog("Getting filter..."); let filter; if (self.opts.filter) { filter = self.opts.filter; @@ -539,8 +550,10 @@ SyncApi.prototype.sync = function() { getFilterName(client.credentials.userId), filter, ); } catch (err) { + console.error("Getting filter failed", err); // wait for saved sync to complete before doing anything else, // otherwise the sync state will end up being incorrect + debuglog("Waiting for saved sync before retrying filter..."); await self.recoverFromSyncStartupError(savedSyncPromise, err); getFilter(); return; @@ -554,11 +567,12 @@ SyncApi.prototype.sync = function() { if (self._currentSyncRequest === null) { // Send this first sync request here so we can then wait for the saved // sync data to finish processing before we process the results of this one. - console.log("Sending first sync request..."); + debuglog("Sending first sync request..."); self._currentSyncRequest = self._doSyncRequest({ filterId }, savedSyncToken); } // Now wait for the saved sync to finish... + debuglog("Waiting for saved sync before starting sync processing..."); await savedSyncPromise; self._sync({ filterId }); } @@ -570,13 +584,19 @@ SyncApi.prototype.sync = function() { // Pull the saved sync token out first, before the worker starts sending // all the sync data which could take a while. 
This will let us send our // first incremental sync request before we've processed our saved data. + debuglog("Getting saved sync token..."); savedSyncPromise = client.store.getSavedSyncToken().then((tok) => { + debuglog("Got saved sync token"); savedSyncToken = tok; + debuglog("Getting saved sync..."); return client.store.getSavedSync(); }).then((savedSync) => { + debuglog(`Got reply from saved sync, exists? ${!!savedSync}`); if (savedSync) { return self._syncFromCache(savedSync); } + }).catch(err => { + console.error("Getting saved sync failed", err); }); // Now start the first incremental sync request: this can also // take a while so if we set it going now, we can wait for it @@ -666,6 +686,7 @@ SyncApi.prototype._syncFromCache = async function(savedSync) { * @param {boolean} syncOptions.hasSyncedBefore */ SyncApi.prototype._sync = async function(syncOptions) { + debuglog("Starting sync request processing..."); const client = this.client; if (!this._running) { @@ -704,7 +725,9 @@ SyncApi.prototype._sync = async function(syncOptions) { // Reset after a successful sync this._failedSyncCount = 0; + debuglog("Storing sync data..."); await client.store.setSyncData(data); + debuglog("Sync data stored"); const syncEventData = { oldSyncToken: syncToken, @@ -719,6 +742,7 @@ SyncApi.prototype._sync = async function(syncOptions) { } try { + debuglog("Processing sync response..."); await this._processSyncResponse(syncEventData, data); } catch(e) { // log the exception with stack if we have it, else fall back From 58b752c63bf6362113f94627984de66badde2345 Mon Sep 17 00:00:00 2001 From: "J. Ryan Stinnett" Date: Mon, 8 Apr 2019 16:24:25 +0100 Subject: [PATCH 026/159] Document checking crypto state before using `hasUnverifiedDevices` It's unclear what `hasUnverifiedDevices` should do when crypto is disabled on the current device. Let's at least document that callers should first check crypto status. --- src/models/room.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/models/room.js b/src/models/room.js index 276b9e5cc..faf865726 100644 --- a/src/models/room.js +++ b/src/models/room.js @@ -598,6 +598,11 @@ Room.prototype._fixUpLegacyTimelineFields = function() { /** * Returns whether there are any devices in the room that are unverified + * + * Note: Callers should first check if crypto is enabled on this device. If it is + * disabled, then we aren't tracking room devices at all, so we can't answer this, and an + * error will be thrown. + * * @return {bool} the result */ Room.prototype.hasUnverifiedDevices = async function() { From 663c0964003e2450e3fb32c61eb62f8cb30bdfc0 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 8 Apr 2019 12:24:00 -0600 Subject: [PATCH 027/159] Cache failed capabilities lookups for shorter amounts of time This should fix https://github.com/vector-im/riot-web/issues/9225 for showing up too often/too long. --- src/client.js | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/src/client.js b/src/client.js index e8b8c1cf8..1b3b65baf 100644 --- a/src/client.js +++ b/src/client.js @@ -445,9 +445,10 @@ MatrixClient.prototype.setNotifTimelineSet = function(notifTimelineSet) { * @return {module:http-api.MatrixError} Rejects: with an error response. 
*/ MatrixClient.prototype.getCapabilities = function() { + const now = new Date().getTime(); + if (this._cachedCapabilities) { - const now = new Date().getTime(); - if (now - this._cachedCapabilities.lastUpdated <= CAPABILITIES_CACHE_MS) { + if (now < this._cachedCapabilities.expiration) { console.log("Returning cached capabilities"); return Promise.resolve(this._cachedCapabilities.capabilities); } @@ -459,9 +460,16 @@ MatrixClient.prototype.getCapabilities = function() { ).catch(() => null).then((r) => { if (!r) r = {}; const capabilities = r["capabilities"] || {}; + + // If the capabilities missed the cache, cache it for a shorter amount + // of time to try and refresh them later. + const cacheMs = Object.keys(capabilities).length + ? CAPABILITIES_CACHE_MS + : 60000 + (Math.random() * 5000); + this._cachedCapabilities = { capabilities: capabilities, - lastUpdated: new Date().getTime(), + expiration: now + cacheMs, }; console.log("Caching capabilities: ", capabilities); From 9dc344999eb27e13362a2e5fae0cdc415c105438 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 8 Apr 2019 15:57:44 -0600 Subject: [PATCH 028/159] Fix highlight notifications for unencrypted rooms A logic error introduced by https://github.com/matrix-org/matrix-js-sdk/pull/886 meant that all unencrypted rooms were not getting highlight notifications. --- src/sync.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/sync.js b/src/sync.js index d1e58e14d..0c6c71cac 100644 --- a/src/sync.js +++ b/src/sync.js @@ -1105,8 +1105,8 @@ SyncApi.prototype._processSyncResponse = async function( // bother setting it here. We trust our calculations better than the // server's for this case, and therefore will assume that our non-zero // count is accurate. - if (client.isRoomEncrypted(room.roomId) - && room.getUnreadNotificationCount('highlight') <= 0) { + const encrypted = client.isRoomEncrypted(room.roomId); + if (!encrypted || (encrypted && room.getUnreadNotificationCount('highlight') <= 0)) { room.setUnreadNotificationCount( 'highlight', joinObj.unread_notifications.highlight_count, ); From b0c3d0d2e3162d5c33c81180e4bec70003852ddc Mon Sep 17 00:00:00 2001 From: "J. Ryan Stinnett" Date: Tue, 9 Apr 2019 15:06:54 +0100 Subject: [PATCH 029/159] Explicitly guard store usage during sync startup This adds explicit `try` blocks in the spots where we interact with the store during sync startup. This shouldn't be necessary as the store should already be catching this and degrading as of https://github.com/matrix-org/matrix-js-sdk/pull/884, but that doesn't seem to have been enough for the affected user in https://github.com/vector-im/riot-web/issues/7769, as they are seeing sync just stop when storing without any further detail. 
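For illustration, a minimal sketch of the guarded-store pattern described above. The helper and store stub are assumptions made for the example, not the SDK's store API:

    // Guard a store interaction: log the failure in detail, then either fall
    // back to a default (for non-essential data) or re-throw so the existing
    // error flow is preserved for essential steps.
    async function guardStoreCall(description, call, fallback) {
        try {
            return await call();
        } catch (err) {
            console.error(`${description} failed`, err);
            if (fallback !== undefined) return fallback;
            throw err;
        }
    }

    // Example with a store stub whose write rejects:
    const store = { save: async () => { throw new Error('quota exceeded'); } };
    guardStoreCall('Storing client options', () => store.save(), null)
        .then((result) => console.log('continuing with', result));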
--- src/sync.js | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/src/sync.js b/src/sync.js index d1e58e14d..9221eef2d 100644 --- a/src/sync.js +++ b/src/sync.js @@ -499,9 +499,16 @@ SyncApi.prototype.sync = function() { const supported = await client.doesServerSupportLazyLoading(); if (supported) { debuglog("Creating and storing lazy load sync filter..."); - this.opts.filter = await client.createFilter( - Filter.LAZY_LOADING_SYNC_FILTER, - ); + try { + this.opts.filter = await client.createFilter( + Filter.LAZY_LOADING_SYNC_FILTER, + ); + } catch (err) { + console.error( + "Creating and storing lazy load sync filter failed", + err, + ); + } debuglog("Created and stored lazy load sync filter"); } else { debuglog("LL: lazy loading requested but not supported " + @@ -528,7 +535,11 @@ SyncApi.prototype.sync = function() { this.opts.crypto.enableLazyLoading(); } debuglog("Storing client options..."); - await this.client._storeClientOptions(); + try { + await this.client._storeClientOptions(); + } catch (err) { + console.error("Storing client options failed", err); + } debuglog("Stored client options"); getFilter(); // Now get the filter and start syncing From f72ae490a804214f4f9b4321bd91cb2383f66a7d Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Tue, 9 Apr 2019 09:05:20 -0600 Subject: [PATCH 030/159] Appease the linter --- src/sync.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/sync.js b/src/sync.js index 0c6c71cac..24e9168b1 100644 --- a/src/sync.js +++ b/src/sync.js @@ -1106,7 +1106,8 @@ SyncApi.prototype._processSyncResponse = async function( // server's for this case, and therefore will assume that our non-zero // count is accurate. const encrypted = client.isRoomEncrypted(room.roomId); - if (!encrypted || (encrypted && room.getUnreadNotificationCount('highlight') <= 0)) { + if (!encrypted + || (encrypted && room.getUnreadNotificationCount('highlight') <= 0)) { room.setUnreadNotificationCount( 'highlight', joinObj.unread_notifications.highlight_count, ); From 3aa8bfa6cabf2bbda02e53f7f500b1907bc13825 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 8 Apr 2019 12:24:37 -0600 Subject: [PATCH 031/159] Flag v3 rooms as safe The spec says they are, so we might as well too. --- src/models/room.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/models/room.js b/src/models/room.js index faf865726..623f09668 100644 --- a/src/models/room.js +++ b/src/models/room.js @@ -38,7 +38,7 @@ import ReEmitter from '../ReEmitter'; // to upgrade (ie: "stable"). Eventually, we should remove these when all homeservers // return an m.room_versions capability. 
const KNOWN_SAFE_ROOM_VERSION = '1'; -const SAFE_ROOM_VERSIONS = ['1', '2']; +const SAFE_ROOM_VERSIONS = ['1', '2', '3']; function synthesizeReceipt(userId, event, receiptType) { // console.log("synthesizing receipt for "+event.getId()); From 6979177fb27352059d85e74dd80e7eb129595197 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Tue, 9 Apr 2019 09:27:38 -0600 Subject: [PATCH 032/159] Log errors for capabilities requests --- src/client.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/client.js b/src/client.js index 1b3b65baf..83912c45a 100644 --- a/src/client.js +++ b/src/client.js @@ -457,7 +457,10 @@ MatrixClient.prototype.getCapabilities = function() { // We swallow errors because we need a default object anyhow return this._http.authedRequest( undefined, "GET", "/capabilities", - ).catch(() => null).then((r) => { + ).catch((e) => { + console.error(e); + return null; // otherwise consume the error + }).then((r) => { if (!r) r = {}; const capabilities = r["capabilities"] || {}; From b99243406be6116bc2e1d4790dfcbe7c46c28c59 Mon Sep 17 00:00:00 2001 From: "J. Ryan Stinnett" Date: Tue, 9 Apr 2019 16:40:13 +0100 Subject: [PATCH 033/159] Move logs inside try block --- src/sync.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/sync.js b/src/sync.js index 9221eef2d..f4142ebfe 100644 --- a/src/sync.js +++ b/src/sync.js @@ -498,18 +498,18 @@ SyncApi.prototype.sync = function() { debuglog("Checking server lazy load support..."); const supported = await client.doesServerSupportLazyLoading(); if (supported) { - debuglog("Creating and storing lazy load sync filter..."); try { + debuglog("Creating and storing lazy load sync filter..."); this.opts.filter = await client.createFilter( Filter.LAZY_LOADING_SYNC_FILTER, ); + debuglog("Created and stored lazy load sync filter"); } catch (err) { console.error( "Creating and storing lazy load sync filter failed", err, ); } - debuglog("Created and stored lazy load sync filter"); } else { debuglog("LL: lazy loading requested but not supported " + "by server, so disabling"); @@ -534,13 +534,13 @@ SyncApi.prototype.sync = function() { if (this.opts.lazyLoadMembers && this.opts.crypto) { this.opts.crypto.enableLazyLoading(); } - debuglog("Storing client options..."); try { + debuglog("Storing client options..."); await this.client._storeClientOptions(); + debuglog("Stored client options"); } catch (err) { console.error("Storing client options failed", err); } - debuglog("Stored client options"); getFilter(); // Now get the filter and start syncing }; From 44de7fad6fe492ad4e720863157805cec4de5354 Mon Sep 17 00:00:00 2001 From: "J. 
Ryan Stinnett" Date: Tue, 9 Apr 2019 16:44:36 +0100 Subject: [PATCH 034/159] Preserve previous error flow --- src/sync.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/sync.js b/src/sync.js index f4142ebfe..25d0db79c 100644 --- a/src/sync.js +++ b/src/sync.js @@ -509,6 +509,7 @@ SyncApi.prototype.sync = function() { "Creating and storing lazy load sync filter failed", err, ); + throw err; } } else { debuglog("LL: lazy loading requested but not supported " + @@ -540,6 +541,7 @@ SyncApi.prototype.sync = function() { debuglog("Stored client options"); } catch (err) { console.error("Storing client options failed", err); + throw err; } getFilter(); // Now get the filter and start syncing From 13d3be637b6c5cd40c507670dea529ce4115d770 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Tue, 9 Apr 2019 09:58:33 -0600 Subject: [PATCH 035/159] Copyright 2019 NV --- src/models/room.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/models/room.js b/src/models/room.js index 623f09668..84bcfd0ff 100644 --- a/src/models/room.js +++ b/src/models/room.js @@ -1,6 +1,6 @@ /* Copyright 2015, 2016 OpenMarket Ltd -Copyright 2018 New Vector Ltd +Copyright 2018, 2019 New Vector Ltd Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. From 6db973f430cb68a5c96489e01504a0f03dde70e6 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Thu, 11 Apr 2019 15:50:43 -0600 Subject: [PATCH 036/159] Expose better autodiscovery error messages Fixes https://github.com/vector-im/riot-web/issues/7925 --- src/autodiscovery.js | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/autodiscovery.js b/src/autodiscovery.js index ae2ae5e05..f8723184c 100644 --- a/src/autodiscovery.js +++ b/src/autodiscovery.js @@ -202,6 +202,8 @@ export class AutoDiscovery { } else { // this can only ever be FAIL_PROMPT at this point. 
clientConfig["m.homeserver"].state = AutoDiscovery.FAIL_PROMPT; + clientConfig["m.homeserver"].error = + "Failed to get autodiscovery configuration from server"; } return Promise.resolve(clientConfig); } @@ -213,6 +215,7 @@ ); if (!hsUrl) { logger.error("Invalid base_url for m.homeserver"); + clientConfig["m.homeserver"].error = "Invalid base_url for m.homeserver"; return Promise.resolve(clientConfig); } @@ -222,6 +225,8 @@ ); if (!hsVersions || !hsVersions.raw["versions"]) { logger.error("Invalid /versions response"); + clientConfig["m.homeserver"].error = + "Homeserver URL does not appear to be a valid Matrix homeserver"; return Promise.resolve(clientConfig); } @@ -263,6 +268,8 @@ ); if (!isUrl) { logger.error("Invalid base_url for m.identity_server"); + failingClientConfig["m.identity_server"].error = + "Invalid base_url for m.identity_server"; return Promise.resolve(failingClientConfig); } @@ -273,6 +280,8 @@ ); if (!isResponse || !isResponse.raw || isResponse.action !== "SUCCESS") { logger.error("Invalid /api/v1 response"); + failingClientConfig["m.identity_server"].error = + "Identity server URL does not appear to be a valid identity server"; return Promise.resolve(failingClientConfig); } } From 348c293962a4317cd454eb5012d91e70ed9eed52 Mon Sep 17 00:00:00 2001 From: jkasun Date: Sat, 13 Apr 2019 22:23:57 +0530 Subject: [PATCH 037/159] Argument Length Check, Duplicate Fix for Redact Function --- src/base-apis.js | 24 ++++-------------------- 1 file changed, 4 insertions(+), 20 deletions(-) diff --git a/src/base-apis.js b/src/base-apis.js index 6ea62c0b2..59e0f432f 100644 --- a/src/base-apis.js +++ b/src/base-apis.js @@ -1,23 +1,3 @@ -/** - * @param {string} roomId - * @param {string} eventId - * @param {string=} txnId transaction id. One will be made up if not - * supplied. - * @param {module:client.callback} callback Optional. - * @return {module:client.Promise} Resolves: TODO - * @return {module:http-api.MatrixError} Rejects: with an error response. - */ -MatrixBaseApis.prototype.redactEvent = function( - roomId, eventId, txnId, callback, -) { - const path = utils.encodeUri("/rooms/$roomId/redact/$eventId/$tnxId", { - $roomId: roomId, - $eventId: eventId, - $txnId: txnId ?
txnId : this.makeTxnId(), - }); - - return this._http.authedRequest(callback, "PUT", path, undefined, {}); -}; /* Copyright 2015, 2016 OpenMarket Ltd Copyright 2017 Vector Creations Ltd @@ -920,6 +900,10 @@ MatrixBaseApis.prototype.sendStateEvent = function(roomId, eventType, content, s MatrixBaseApis.prototype.redactEvent = function( roomId, eventId, txnId, callback, ) { + if (arguments.length === 3) { + callback = txnId; + } + const path = utils.encodeUri("/rooms/$roomId/redact/$eventId/$tnxId", { $roomId: roomId, $eventId: eventId, From dcd9b5c382b41e8465ffae025feff9db21de7ffe Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Sun, 14 Apr 2019 21:38:51 -0600 Subject: [PATCH 038/159] Stop syncing when the token is invalid Fixes https://github.com/vector-im/riot-web/issues/9451 --- src/sync.js | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/sync.js b/src/sync.js index f7f623448..18cc579fb 100644 --- a/src/sync.js +++ b/src/sync.js @@ -462,6 +462,16 @@ SyncApi.prototype.sync = function() { let savedSyncPromise = Promise.resolve(); let savedSyncToken = null; + async function shouldAbortSync(err) { + if (err.errcode === "M_UNKNOWN_TOKEN") { + // The logout already happened, we just need to stop. + console.warn("Token no longer valid - assuming logout"); + self.stop(); + return true; + } + return false; + } + // We need to do one-off checks before we can begin the /sync loop. // These are: // 1) We need to get push rules so we can check if events should bing as we get @@ -479,6 +489,7 @@ SyncApi.prototype.sync = function() { client.pushRules = result; } catch (err) { console.error("Getting push rules failed", err); + if (shouldAbortSync(err)) return; // wait for saved sync to complete before doing anything else, // otherwise the sync state will end up being incorrect debuglog("Waiting for saved sync before retrying push rules..."); @@ -564,6 +575,7 @@ SyncApi.prototype.sync = function() { ); } catch (err) { console.error("Getting filter failed", err); + if (shouldAbortSync(err)) return; // wait for saved sync to complete before doing anything else, // otherwise the sync state will end up being incorrect debuglog("Waiting for saved sync before retrying filter..."); @@ -877,6 +889,12 @@ SyncApi.prototype._onSyncError = function(err, syncOptions) { console.error("/sync error %s", err); console.error(err); + if (err.errcode === "M_UNKNOWN_TOKEN") { + console.warn("Access token no longer valid - stopping sync"); + this.stop(); + return; + } + this._failedSyncCount++; console.log('Number of consecutive failed sync requests:', this._failedSyncCount); From 430da8ac0993be4e9ca1ead9ba7ac4e6f6c492cc Mon Sep 17 00:00:00 2001 From: jkasun Date: Mon, 15 Apr 2019 20:34:39 +0530 Subject: [PATCH 039/159] JS Doc Fix --- src/base-apis.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/base-apis.js b/src/base-apis.js index 59e0f432f..6750e7823 100644 --- a/src/base-apis.js +++ b/src/base-apis.js @@ -891,7 +891,7 @@ MatrixBaseApis.prototype.sendStateEvent = function(roomId, eventType, content, s /** * @param {string} roomId * @param {string} eventId - * @param {string=} txnId transaction id. One will be made up if not + * @param {string} [txnId] transaction id. One will be made up if not * supplied. * @param {module:client.callback} callback Optional. 
* @return {module:client.Promise} Resolves: TODO From f7d19842573b4bee45c466b35c7a40910894381d Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 15 Apr 2019 11:29:55 -0600 Subject: [PATCH 040/159] De-duplicate usage of shouldAbortSync --- src/sync.js | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/src/sync.js b/src/sync.js index 18cc579fb..54853ef79 100644 --- a/src/sync.js +++ b/src/sync.js @@ -445,6 +445,16 @@ SyncApi.prototype._wasLazyLoadingToggled = async function(lazyLoadMembers) { return false; }; +SyncApi.prototype._shouldAbortSync = function(error) { + if (err.errcode === "M_UNKNOWN_TOKEN") { + // The logout already happened, we just need to stop. + console.warn("Token no longer valid - assuming logout"); + self.stop(); + return true; + } + return false; +}; + /** * Main entry point */ @@ -462,16 +472,6 @@ SyncApi.prototype.sync = function() { let savedSyncPromise = Promise.resolve(); let savedSyncToken = null; - async function shouldAbortSync(err) { - if (err.errcode === "M_UNKNOWN_TOKEN") { - // The logout already happened, we just need to stop. - console.warn("Token no longer valid - assuming logout"); - self.stop(); - return true; - } - return false; - } - // We need to do one-off checks before we can begin the /sync loop. // These are: // 1) We need to get push rules so we can check if events should bing as we get @@ -489,7 +489,7 @@ SyncApi.prototype.sync = function() { client.pushRules = result; } catch (err) { console.error("Getting push rules failed", err); - if (shouldAbortSync(err)) return; + if (self._shouldAbortSync(err)) return; // wait for saved sync to complete before doing anything else, // otherwise the sync state will end up being incorrect debuglog("Waiting for saved sync before retrying push rules..."); @@ -575,7 +575,7 @@ SyncApi.prototype.sync = function() { ); } catch (err) { console.error("Getting filter failed", err); - if (shouldAbortSync(err)) return; + if (self._shouldAbortSync(err)) return; // wait for saved sync to complete before doing anything else, // otherwise the sync state will end up being incorrect debuglog("Waiting for saved sync before retrying filter..."); @@ -889,9 +889,7 @@ SyncApi.prototype._onSyncError = function(err, syncOptions) { console.error("/sync error %s", err); console.error(err); - if (err.errcode === "M_UNKNOWN_TOKEN") { - console.warn("Access token no longer valid - stopping sync"); - this.stop(); + if(this._shouldAbortSync(err)) { return; } From 14973a35c2ae36597e68d46f99c8c8c8d11a3382 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 15 Apr 2019 19:46:30 -0600 Subject: [PATCH 041/159] Use the right error object --- src/sync.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sync.js b/src/sync.js index 54853ef79..843e3cfb0 100644 --- a/src/sync.js +++ b/src/sync.js @@ -446,7 +446,7 @@ SyncApi.prototype._wasLazyLoadingToggled = async function(lazyLoadMembers) { }; SyncApi.prototype._shouldAbortSync = function(error) { - if (err.errcode === "M_UNKNOWN_TOKEN") { + if (error.errcode === "M_UNKNOWN_TOKEN") { // The logout already happened, we just need to stop. 
console.warn("Token no longer valid - assuming logout"); self.stop(); From deb7433453cd5bcc291acb01f4aa0ecdc35f8299 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 15 Apr 2019 20:17:42 -0600 Subject: [PATCH 042/159] Use toMatch for presence events We don't pass the reference through, so the test fails with toEqual --- spec/integ/matrix-client-event-emitter.spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/integ/matrix-client-event-emitter.spec.js b/spec/integ/matrix-client-event-emitter.spec.js index e2fc810af..bc3a07214 100644 --- a/spec/integ/matrix-client-event-emitter.spec.js +++ b/spec/integ/matrix-client-event-emitter.spec.js @@ -157,7 +157,7 @@ describe("MatrixClient events", function() { return; } - expect(event.event).toEqual(SYNC_DATA.presence.events[0]); + expect(event.event).toMatch(SYNC_DATA.presence.events[0]); expect(user.presence).toEqual( SYNC_DATA.presence.events[0].content.presence, ); From 491226a916097d4c2da2b4ee1dec774fce959339 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 15 Apr 2019 21:04:29 -0600 Subject: [PATCH 043/159] Use the right this in _shouldAbortSync --- src/sync.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sync.js b/src/sync.js index 843e3cfb0..3d8df8f1a 100644 --- a/src/sync.js +++ b/src/sync.js @@ -449,7 +449,7 @@ SyncApi.prototype._shouldAbortSync = function(error) { if (error.errcode === "M_UNKNOWN_TOKEN") { // The logout already happened, we just need to stop. console.warn("Token no longer valid - assuming logout"); - self.stop(); + this.stop(); return true; } return false; From fe47435fc7e77aea93ff5428269367d0bd50741b Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 15 Apr 2019 21:12:17 -0600 Subject: [PATCH 044/159] Fix tests for autodiscovery --- spec/unit/autodiscovery.spec.js | 33 +++++++++++++++++++-------------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/spec/unit/autodiscovery.spec.js b/spec/unit/autodiscovery.spec.js index 9928e098b..e73121c37 100644 --- a/spec/unit/autodiscovery.spec.js +++ b/spec/unit/autodiscovery.spec.js @@ -94,7 +94,7 @@ describe("AutoDiscovery", function() { const expected = { "m.homeserver": { state: "FAIL_PROMPT", - error: "Invalid homeserver discovery response", + error: "Failed to get autodiscovery configuration from server", base_url: null, }, "m.identity_server": { @@ -117,7 +117,7 @@ describe("AutoDiscovery", function() { const expected = { "m.homeserver": { state: "FAIL_PROMPT", - error: "Invalid homeserver discovery response", + error: "Failed to get autodiscovery configuration from server", base_url: null, }, "m.identity_server": { @@ -140,7 +140,7 @@ describe("AutoDiscovery", function() { const expected = { "m.homeserver": { state: "FAIL_PROMPT", - error: "Invalid homeserver discovery response", + error: "Failed to get autodiscovery configuration from server", base_url: null, }, "m.identity_server": { @@ -163,7 +163,7 @@ describe("AutoDiscovery", function() { const expected = { "m.homeserver": { state: "FAIL_PROMPT", - error: "Invalid homeserver discovery response", + error: "Failed to get autodiscovery configuration from server", base_url: null, }, "m.identity_server": { @@ -191,7 +191,7 @@ describe("AutoDiscovery", function() { const expected = { "m.homeserver": { state: "FAIL_PROMPT", - error: "Invalid homeserver discovery response", + error: "Failed to get autodiscovery configuration from server", base_url: null, }, "m.identity_server": { @@ -217,7 +217,7 @@ describe("AutoDiscovery", function() { 
const expected = { "m.homeserver": { state: "FAIL_PROMPT", - error: "Invalid homeserver discovery response", + error: "Failed to get autodiscovery configuration from server", base_url: null, }, "m.identity_server": { @@ -245,7 +245,7 @@ describe("AutoDiscovery", function() { const expected = { "m.homeserver": { state: "FAIL_ERROR", - error: "Invalid homeserver discovery response", + error: "Invalid base_url for m.homeserver", base_url: null, }, "m.identity_server": { @@ -274,7 +274,8 @@ describe("AutoDiscovery", function() { const expected = { "m.homeserver": { state: "FAIL_ERROR", - error: "Invalid homeserver discovery response", + error: "Homeserver URL does not appear to be a " + + "valid Matrix homeserver", base_url: null, }, "m.identity_server": { @@ -303,7 +304,8 @@ describe("AutoDiscovery", function() { const expected = { "m.homeserver": { state: "FAIL_ERROR", - error: "Invalid homeserver discovery response", + error: "Homeserver URL does not appear to be a " + + "valid Matrix homeserver", base_url: null, }, "m.identity_server": { @@ -334,7 +336,8 @@ describe("AutoDiscovery", function() { const expected = { "m.homeserver": { state: "FAIL_ERROR", - error: "Invalid homeserver discovery response", + error: "Homeserver URL does not appear to be a " + + "valid Matrix homeserver", base_url: null, }, "m.identity_server": { @@ -446,7 +449,7 @@ describe("AutoDiscovery", function() { }, "m.identity_server": { state: "FAIL_ERROR", - error: "Invalid identity server discovery response", + error: "Invalid base_url for m.identity_server", base_url: null, }, }; @@ -486,7 +489,7 @@ describe("AutoDiscovery", function() { }, "m.identity_server": { state: "FAIL_ERROR", - error: "Invalid identity server discovery response", + error: "Invalid base_url for m.identity_server", base_url: null, }, }; @@ -527,7 +530,8 @@ describe("AutoDiscovery", function() { }, "m.identity_server": { state: "FAIL_ERROR", - error: "Invalid identity server discovery response", + error: "Identity server URL does not appear to be a " + + "valid identity server", base_url: null, }, }; @@ -568,7 +572,8 @@ describe("AutoDiscovery", function() { }, "m.identity_server": { state: "FAIL_ERROR", - error: "Invalid identity server discovery response", + error: "Identity server URL does not appear to be a " + + "valid identity server", base_url: null, }, }; From 7cede221de62184fd8887fbf94d67c6ff57dac64 Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 15 Apr 2019 14:49:01 -0600 Subject: [PATCH 045/159] Support being fed a .well-known config object for validation Used by Riot to consume the user's provided config. This also includes a change to carry over custom keys on m.homeserver and m.identity_server which aren't intentionally controlled. --- src/autodiscovery.js | 256 ++++++++++++++++++++++++++----------------- 1 file changed, 157 insertions(+), 99 deletions(-) diff --git a/src/autodiscovery.js b/src/autodiscovery.js index f8723184c..811ff9c19 100644 --- a/src/autodiscovery.js +++ b/src/autodiscovery.js @@ -137,6 +137,160 @@ export class AutoDiscovery { */ static get SUCCESS() { return "SUCCESS"; } + /** + * Validates and verifies client configuration information for purposes + * of logging in. Such information includes the homeserver URL + * and identity server URL the client would want. Additional details + * may also be included, and will be transparently brought into the + * response object unaltered. + * @param {string} wellknown The configuration object itself, as returned + * by the .well-known auto-discovery endpoint. 
+ * @return {Promise} Resolves to the verified + * configuration, which may include error states. Rejects on unexpected + * failure, not when verification fails. + */ + static async fromDiscoveryConfig(wellknown) { + // Step 1 is to get the config, which is provided to us here. + + // We default to an error state to make the first few checks easier to + // write. We'll update the properties of this object over the duration + // of this function. + const clientConfig = { + "m.homeserver": { + state: AutoDiscovery.FAIL_ERROR, + error: "Invalid homeserver discovery response", + base_url: null, + }, + "m.identity_server": { + // Technically, we don't have a problem with the identity server + // config at this point. + state: AutoDiscovery.PROMPT, + error: null, + base_url: null, + }, + }; + + if (!wellknown || + !wellknown["m.homeserver"] || + !wellknown["m.homeserver"]["base_url"]) { + logger.error("No m.homeserver key/base_url in config"); + + clientConfig["m.homeserver"].state = AutoDiscovery.FAIL_PROMPT; + clientConfig["m.homeserver"].error = "Invalid base_url for m.homeserver"; + + return Promise.resolve(clientConfig); + } + + // Step 2: Make sure the homeserver URL is valid *looking*. We'll make + // sure it points to a homeserver in Step 3. + const hsUrl = this._sanitizeWellKnownUrl( + wellknown["m.homeserver"]["base_url"], + ); + if (!hsUrl) { + logger.error("Invalid base_url for m.homeserver"); + clientConfig["m.homeserver"].error = "Invalid base_url for m.homeserver"; + return Promise.resolve(clientConfig); + } + + // Step 3: Make sure the homeserver URL points to a homeserver. + const hsVersions = await this._fetchWellKnownObject( + `${hsUrl}/_matrix/client/versions`, + ); + if (!hsVersions || !hsVersions.raw["versions"]) { + logger.error("Invalid /versions response"); + clientConfig["m.homeserver"].error = + "Homeserver URL does not appear to be a valid Matrix homeserver"; + return Promise.resolve(clientConfig); + } + + // Step 4: Now that the homeserver looks valid, update our client config. + clientConfig["m.homeserver"] = { + state: AutoDiscovery.SUCCESS, + error: null, + base_url: hsUrl, + }; + + // Step 5: Try to pull out the identity server configuration + let isUrl = ""; + if (wellknown["m.identity_server"]) { + // We prepare a failing identity server response to save lines later + // in this branch. Note that we also fail the homeserver check in the + // object because according to the spec we're supposed to FAIL_ERROR + // if *anything* goes wrong with the IS validation, including invalid + // format. This means we're supposed to stop discovery completely. + const failingClientConfig = { + "m.homeserver": { + state: AutoDiscovery.FAIL_ERROR, + error: "Invalid identity server discovery response", + + // We'll provide the base_url that was previously valid for + // debugging purposes. + base_url: clientConfig["m.homeserver"].base_url, + }, + "m.identity_server": { + state: AutoDiscovery.FAIL_ERROR, + error: "Invalid identity server discovery response", + base_url: null, + }, + }; + + // Step 5a: Make sure the URL is valid *looking*. We'll make sure it + // points to an identity server in Step 5b. 
+ isUrl = this._sanitizeWellKnownUrl( + wellknown["m.identity_server"]["base_url"], + ); + if (!isUrl) { + logger.error("Invalid base_url for m.identity_server"); + failingClientConfig["m.identity_server"].error = + "Invalid base_url for m.identity_server"; + return Promise.resolve(failingClientConfig); + } + + // Step 5b: Verify there is an identity server listening on the provided + // URL. + const isResponse = await this._fetchWellKnownObject( + `${isUrl}/_matrix/identity/api/v1`, + ); + if (!isResponse || !isResponse.raw || isResponse.action !== "SUCCESS") { + logger.error("Invalid /api/v1 response"); + failingClientConfig["m.identity_server"].error = + "Identity server URL does not appear to be a valid identity server"; + return Promise.resolve(failingClientConfig); + } + } + + // Step 6: Now that the identity server is valid, or never existed, + // populate the IS section. + if (isUrl && isUrl.length > 0) { + clientConfig["m.identity_server"] = { + state: AutoDiscovery.SUCCESS, + error: null, + base_url: isUrl, + }; + } + + // Step 7: Copy any other keys directly into the clientConfig. This is for + // things like custom configuration of services. + Object.keys(wellknown) + .map((k) => { + if (k === "m.homeserver" || k === "m.identity_server") { + // Only copy selected parts of the config to avoid overwriting + // important information. + const notProps = ["error", "state", "base_url"]; + for (const prop of Object.keys(wellknown[k])) { + if (notProps.includes(prop)) continue; + clientConfig[k][prop] = wellknown[k][prop]; + } + } else { + // Just copy the whole thing over otherwise + clientConfig[k] = wellknown[k]; + } + }); + + // Step 8: Give the config to the caller (finally) + return Promise.resolve(clientConfig); + } + /** * Attempts to automatically discover client configuration information * prior to logging in. Such information includes the homeserver URL @@ -188,9 +342,7 @@ export class AutoDiscovery { const wellknown = await this._fetchWellKnownObject( `https://${domain}/.well-known/matrix/client`, ); - if (!wellknown || wellknown.action !== "SUCCESS" - || !wellknown.raw["m.homeserver"] - || !wellknown.raw["m.homeserver"]["base_url"]) { + if (!wellknown || wellknown.action !== "SUCCESS") { logger.error("No m.homeserver key in well-known response"); if (wellknown.reason) logger.error(wellknown.reason); if (wellknown.action === "IGNORE") { @@ -208,102 +360,8 @@ export class AutoDiscovery { return Promise.resolve(clientConfig); } - // Step 2: Make sure the homeserver URL is valid *looking*. We'll make - // sure it points to a homeserver in Step 3. - const hsUrl = this._sanitizeWellKnownUrl( - wellknown.raw["m.homeserver"]["base_url"], - ); - if (!hsUrl) { - logger.error("Invalid base_url for m.homeserver"); - clientConfig["m.homeserver"].error = "Invalid base_url for m.homeserver"; - return Promise.resolve(clientConfig); - } - - // Step 3: Make sure the homeserver URL points to a homeserver. - const hsVersions = await this._fetchWellKnownObject( - `${hsUrl}/_matrix/client/versions`, - ); - if (!hsVersions || !hsVersions.raw["versions"]) { - logger.error("Invalid /versions response"); - clientConfig["m.homeserver"].error = - "Homeserver URL does not appear to be a valid Matrix homeserver"; - return Promise.resolve(clientConfig); - } - - // Step 4: Now that the homeserver looks valid, update our client config. 
- clientConfig["m.homeserver"] = { - state: AutoDiscovery.SUCCESS, - error: null, - base_url: hsUrl, - }; - - // Step 5: Try to pull out the identity server configuration - let isUrl = ""; - if (wellknown.raw["m.identity_server"]) { - // We prepare a failing identity server response to save lines later - // in this branch. Note that we also fail the homeserver check in the - // object because according to the spec we're supposed to FAIL_ERROR - // if *anything* goes wrong with the IS validation, including invalid - // format. This means we're supposed to stop discovery completely. - const failingClientConfig = { - "m.homeserver": { - state: AutoDiscovery.FAIL_ERROR, - error: "Invalid identity server discovery response", - - // We'll provide the base_url that was previously valid for - // debugging purposes. - base_url: clientConfig["m.homeserver"].base_url, - }, - "m.identity_server": { - state: AutoDiscovery.FAIL_ERROR, - error: "Invalid identity server discovery response", - base_url: null, - }, - }; - - // Step 5a: Make sure the URL is valid *looking*. We'll make sure it - // points to an identity server in Step 5b. - isUrl = this._sanitizeWellKnownUrl( - wellknown.raw["m.identity_server"]["base_url"], - ); - if (!isUrl) { - logger.error("Invalid base_url for m.identity_server"); - failingClientConfig["m.identity_server"].error = - "Invalid base_url for m.identity_server"; - return Promise.resolve(failingClientConfig); - } - - // Step 5b: Verify there is an identity server listening on the provided - // URL. - const isResponse = await this._fetchWellKnownObject( - `${isUrl}/_matrix/identity/api/v1`, - ); - if (!isResponse || !isResponse.raw || isResponse.action !== "SUCCESS") { - logger.error("Invalid /api/v1 response"); - failingClientConfig["m.identity_server"].error = - "Identity server URL does not appear to be a valid identity server"; - return Promise.resolve(failingClientConfig); - } - } - - // Step 6: Now that the identity server is valid, or never existed, - // populate the IS section. - if (isUrl && isUrl.length > 0) { - clientConfig["m.identity_server"] = { - state: AutoDiscovery.SUCCESS, - error: null, - base_url: isUrl, - }; - } - - // Step 7: Copy any other keys directly into the clientConfig. This is for - // things like custom configuration of services. - Object.keys(wellknown.raw) - .filter((k) => k !== "m.homeserver" && k !== "m.identity_server") - .map((k) => clientConfig[k] = wellknown.raw[k]); - - // Step 8: Give the config to the caller (finally) - return Promise.resolve(clientConfig); + // Step 2: Validate and parse the config + return AutoDiscovery.fromDiscoveryConfig(wellknown.raw); } /** From 33d2837ec3ea333df1202606562fe4073a6f5b5e Mon Sep 17 00:00:00 2001 From: Travis Ralston Date: Mon, 15 Apr 2019 19:19:38 -0600 Subject: [PATCH 046/159] Use packages.matrix.org for Olm See https://github.com/vector-im/riot-web/issues/9497 --- README.md | 8 ++++---- package.json | 2 +- yarn.lock | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index e36ee1a2f..0869d3556 100644 --- a/README.md +++ b/README.md @@ -318,18 +318,18 @@ specification. To provide the Olm library in a browser application: - * download the transpiled libolm (from https://matrix.org/packages/npm/olm/). + * download the transpiled libolm (from https://packages.matrix.org/npm/olm/). * load ``olm.js`` as a ``