
Merge branch 'develop' into andybalaam/unrevert-deletion-move-prs

Authored by Andy Balaam on 2023-11-14 16:13:56 +00:00, committed by GitHub.
24 changed files with 494 additions and 61 deletions


@@ -0,0 +1,28 @@
name: Sign Release Tarball
description: Generates signature for release tarball and uploads it as a release asset
inputs:
    gpg-fingerprint:
        description: Fingerprint of the GPG key to use for signing the tarball.
        required: true
    upload-url:
        description: GitHub release upload URL to upload the signature file to.
        required: true
runs:
    using: composite
    steps:
        - name: Generate tarball signature
          shell: bash
          run: |
              git -c tar.tar.gz.command='gzip -cn' archive --format=tar.gz --prefix="${REPO#*/}-${VERSION#v}/" -o "/tmp/${VERSION}.tar.gz" "${VERSION}"
              gpg -u "$GPG_FINGERPRINT" --armor --output "${VERSION}.tar.gz.asc" --detach-sig "/tmp/${VERSION}.tar.gz"
              rm "/tmp/${VERSION}.tar.gz"
          env:
              GPG_FINGERPRINT: ${{ inputs.gpg-fingerprint }}
              REPO: ${{ github.repository }}
        - name: Upload tarball signature
          if: ${{ inputs.upload-url }}
          uses: shogo82148/actions-upload-release-asset@dccd6d23e64fd6a746dce6814c0bde0a04886085 # v1
          with:
              upload_url: ${{ inputs.upload-url }}
              asset_path: ${{ env.VERSION }}.tar.gz.asc


@@ -0,0 +1,41 @@
name: Upload release assets
description: Uploads assets to an existing release and optionally signs them
inputs:
    gpg-fingerprint:
        description: Fingerprint of the GPG key to use for signing the assets, if any.
        required: false
    upload-url:
        description: GitHub release upload URL to upload the assets to.
        required: true
    asset-path:
        description: |
            The path to the asset you want to upload, if any. You can use glob patterns here.
            Will be GPG signed and an `.asc` file included in the release artifacts if `gpg-fingerprint` is set.
        required: true
runs:
    using: composite
    steps:
        - name: Sign assets
          if: inputs.gpg-fingerprint
          shell: bash
          run: |
              for FILE in $ASSET_PATH
              do
                  gpg -u "$GPG_FINGERPRINT" --armor --output "$FILE".asc --detach-sig "$FILE"
              done
          env:
              GPG_FINGERPRINT: ${{ inputs.gpg-fingerprint }}
              ASSET_PATH: ${{ inputs.asset-path }}
        - name: Upload asset signatures
          if: inputs.gpg-fingerprint
          uses: shogo82148/actions-upload-release-asset@dccd6d23e64fd6a746dce6814c0bde0a04886085 # v1
          with:
              upload_url: ${{ inputs.upload-url }}
              asset_path: ${{ inputs.asset-path }}.asc
        - name: Upload assets
          uses: shogo82148/actions-upload-release-asset@dccd6d23e64fd6a746dce6814c0bde0a04886085 # v1
          with:
              upload_url: ${{ inputs.upload-url }}
              asset_path: ${{ inputs.asset-path }}


@@ -19,6 +19,11 @@ version-resolver:
        labels:
            - "X-Breaking-Change"
    default: minor
+exclude-labels:
+    - "T-Task"
+    - "X-Reverted"
+exclude-contributors:
+    - "RiotRobot"
template: |
    $CHANGES
prerelease: true


@@ -20,7 +20,7 @@ jobs:
        # from creeping in. They take a long time to run and consume 4 concurrent runners.
        if: github.event.workflow_run.event == 'merge_group'
-       uses: matrix-org/matrix-react-sdk/.github/workflows/cypress.yaml@v3.83.0-rc.1
+       uses: matrix-org/matrix-react-sdk/.github/workflows/cypress.yaml@03b01b4a50d0f3fbbfa6c1a9314ef2d346d089d4
        permissions:
            actions: read
            issues: read
@@ -33,7 +33,6 @@
            TCMS_PASSWORD: ${{ secrets.TCMS_PASSWORD }}
        with:
            react-sdk-repository: matrix-org/matrix-react-sdk
-           rust-crypto: true
    # We want to make the cypress tests a required check for the merge queue.
    #


@@ -6,6 +6,10 @@ on:
                required: true
            NPM_TOKEN:
                required: false
+           GPG_PASSPHRASE:
+               required: false
+           GPG_PRIVATE_KEY:
+               required: false
        inputs:
            final:
                description: Make final release
@@ -22,11 +26,39 @@
                    `version` can be `"current"` to leave it at the current version.
                type: string
                required: false
+           include-changes:
+               description: Project to include changelog entries from in this release.
+               type: string
+               required: false
+           gpg-fingerprint:
+               description: Fingerprint of the GPG key to use for signing the git tag and assets, if any.
+               type: string
+               required: false
+           asset-path:
+               description: |
+                   The path to the asset you want to upload, if any. You can use glob patterns here.
+                   Will be GPG signed and an `.asc` file included in the release artifacts if `gpg-fingerprint` is set.
+               type: string
+               required: false
+           expected-asset-count:
+               description: The number of expected assets, including signatures, excluding generated zip & tarball.
+               type: number
+               required: false
jobs:
    release:
        name: Release
        runs-on: ubuntu-latest
+       environment: Release
        steps:
+           - name: Load GPG key
+             id: gpg
+             if: inputs.gpg-fingerprint
+             uses: crazy-max/ghaction-import-gpg@82a020f1f7f605c65dd2449b392a52c3fcfef7ef # v6
+             with:
+                 gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
+                 passphrase: ${{ secrets.GPG_PASSPHRASE }}
+                 fingerprint: ${{ inputs.gpg-fingerprint }}
            - name: Get draft release
              id: release
              uses: cardinalby/git-get-release-action@cedef2faf69cb7c55b285bad07688d04430b7ada # v1
@@ -49,17 +81,45 @@
                  persist-credentials: false
                  path: .action-repo
                  sparse-checkout: |
+                     .github/actions
                      scripts/release
-           - name: Load version
-             run: echo "VERSION=$VERSION" >> $GITHUB_ENV
+           - name: Prepare variables
+             id: prepare
+             run: |
+                 echo "VERSION=$VERSION" >> $GITHUB_ENV
+                 {
+                     echo "RELEASE_NOTES<<EOF"
+                     echo "$BODY"
+                     echo "EOF"
+                 } >> $GITHUB_ENV
+                 HAS_DIST=0
+                 jq -e .scripts.dist package.json >/dev/null 2>&1 && HAS_DIST=1
+                 echo "has-dist-script=$HAS_DIST" >> $GITHUB_OUTPUT
              env:
+                 BODY: ${{ steps.release.outputs.body }}
                  VERSION: ${{ steps.release.outputs.tag_name }}
            - name: Finalise version
-             if: inputs.mode == 'final'
+             if: inputs.final
              run: echo "VERSION=$(echo $VERSION | cut -d- -f1)" >> $GITHUB_ENV
+           - name: Check version number not in use
+             uses: actions/github-script@v6
+             with:
+                 script: |
+                     const { VERSION } = process.env;
+                     github.rest.repos.getReleaseByTag({
+                         owner: context.repo.owner,
+                         repo: context.repo.repo,
+                         tag: VERSION,
+                     }).then(() => {
+                         core.setFailed(`Version ${VERSION} already exists`);
+                     }).catch(() => {
+                         // This is fine, we expect there to not be any release with this version yet
+                     });
            - name: Set up git
              run: |
                  git config --global user.email "releases@riot.im"
@@ -73,8 +133,10 @@
              run: "yarn install --frozen-lockfile"
            - name: Update dependencies
+             id: update-dependencies
              if: inputs.dependencies
              run: |
+                 UPDATED=()
                  while IFS= read -r DEPENDENCY; do
                      [ -z "$DEPENDENCY" ] && continue
                      IFS="=" read -r PACKAGE UPDATE_VERSION <<< "$DEPENDENCY"
@@ -98,7 +160,11 @@
                      yarn upgrade "$PACKAGE@$UPDATE_VERSION" --exact
                      git add -u
                      git commit -m "Upgrade $PACKAGE to $UPDATE_VERSION"
+                     UPDATED+=("$PACKAGE")
                  done <<< "$DEPENDENCIES"
+                 JSON=$(jq --compact-output --null-input '$ARGS.positional' --args -- "${UPDATED[@]}")
+                 echo "updated=$JSON" >> $GITHUB_OUTPUT
              env:
                  DEPENDENCIES: ${{ inputs.dependencies }}
@@ -113,10 +179,32 @@
                  fi
            - name: Bump package.json version
-             run: yarn version --no-git-tag-version --new-version "$VERSION"
+             run: yarn version --no-git-tag-version --new-version "${VERSION#v}"
+           - name: Ingest upstream changes
+             if: |
+                 inputs.dependencies &&
+                 inputs.include-changes &&
+                 contains(fromJSON(steps.update-dependencies.outputs.updated), inputs.include-changes)
+             uses: actions/github-script@v6
+             env:
+                 RELEASE_ID: ${{ steps.release.outputs.id }}
+                 DEPENDENCY: ${{ inputs.include-changes }}
+             with:
+                 retries: 3
+                 script: |
+                     const { RELEASE_ID: releaseId, DEPENDENCY, VERSION } = process.env;
+                     const { owner, repo } = context.repo;
+                     const script = require("./.action-repo/scripts/release/merge-release-notes.js");
+                     const notes = await script({
+                         github,
+                         releaseId,
+                         dependencies: [DEPENDENCY.replace("$VERSION", VERSION)],
+                     });
+                     core.exportVariable("RELEASE_NOTES", notes);
            - name: Add to CHANGELOG.md
-             if: inputs.mode == 'final'
+             if: inputs.final
              run: |
                  mv CHANGELOG.md CHANGELOG.md.old
                  HEADER="Changes in [${VERSION#v}](https://github.com/${{ github.repository }}/releases/tag/$VERSION) ($(date '+%Y-%m-%d'))"
@@ -125,25 +213,84 @@
                      echo "$HEADER"
                      printf '=%.0s' $(seq ${#HEADER})
                      echo ""
-                     echo "$BODY"
+                     echo "$RELEASE_NOTES"
                      echo ""
                  } > CHANGELOG.md
                  cat CHANGELOG.md.old >> CHANGELOG.md
                  rm CHANGELOG.md.old
                  git add CHANGELOG.md
-             env:
-                 BODY: ${{ steps.release.outputs.body }}
            - name: Run pre-release script to update package.json fields
              run: |
                  ./.action-repo/scripts/release/pre-release.sh
                  git add package.json
-           - name: Commit and push changes
+           - name: Commit changes
+             run: git commit -m "$VERSION"
+           - name: Build assets
+             if: steps.prepare.outputs.has-dist-script == '1'
+             run: DIST_VERSION="$VERSION" yarn dist
+           - name: Upload release assets & signatures
+             if: inputs.asset-path
+             uses: ./.action-repo/.github/actions/upload-release-assets
+             with:
+                 gpg-fingerprint: ${{ inputs.gpg-fingerprint }}
+                 upload-url: ${{ steps.release.outputs.upload_url }}
+                 asset-path: ${{ inputs.asset-path }}
+           - name: Create signed tag
+             if: inputs.gpg-fingerprint
              run: |
-                 git commit -m "$VERSION"
-                 git push origin staging
+                 GIT_COMMITTER_EMAIL="$SIGNING_ID" GPG_TTY=$(tty) git tag -u "$SIGNING_ID" -m "Release $VERSION" "$VERSION"
+             env:
+                 SIGNING_ID: ${{ steps.gpg.outputs.email }}
+           - name: Generate & upload tarball signature
+             if: inputs.gpg-fingerprint
+             uses: ./.action-repo/.github/actions/sign-release-tarball
+             with:
+                 gpg-fingerprint: ${{ inputs.gpg-fingerprint }}
+                 upload-url: ${{ steps.release.outputs.upload_url }}
+           # We defer pushing changes until after the release assets are built,
+           # signed & uploaded to improve the atomicity of this action.
+           - name: Push changes to staging
+             run: |
+                 git push origin staging $TAG
+                 git reset --hard
+             env:
+                 TAG: ${{ inputs.gpg-fingerprint && env.VERSION || '' }}
+           - name: Validate tarball signature
+             if: inputs.gpg-fingerprint
+             run: |
+                 wget https://github.com/$GITHUB_REPOSITORY/archive/refs/tags/$VERSION.tar.gz
+                 gpg --verify "$VERSION.tar.gz.asc" "$VERSION.tar.gz"
+           - name: Validate release has expected assets
+             if: inputs.expected-asset-count
+             uses: actions/github-script@v6
+             env:
+                 RELEASE_ID: ${{ steps.release.outputs.id }}
+                 EXPECTED_ASSET_COUNT: ${{ inputs.expected-asset-count }}
+             with:
+                 retries: 3
+                 script: |
+                     const { RELEASE_ID: release_id, EXPECTED_ASSET_COUNT } = process.env;
+                     const { owner, repo } = context.repo;
+                     const { data: release } = await github.rest.repos.getRelease({
+                         owner,
+                         repo,
+                         release_id,
+                     });
+                     if (release.assets.length !== parseInt(EXPECTED_ASSET_COUNT, 10)) {
+                         core.setFailed(`Found ${release.assets.length} assets but expected ${EXPECTED_ASSET_COUNT}`);
+                     }
            - name: Merge to master
              if: inputs.final
@@ -154,15 +301,14 @@
            - name: Publish release
              uses: actions/github-script@v6
-             id: my-script
              env:
                  RELEASE_ID: ${{ steps.release.outputs.id }}
                  FINAL: ${{ inputs.final }}
              with:
-                 result-encoding: string
                  retries: 3
-                 github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
                  script: |
-                     let { RELEASE_ID: release_id, VERSION, FINAL } = process.env;
+                     const { RELEASE_ID: release_id, RELEASE_NOTES, VERSION, FINAL } = process.env;
                      const { owner, repo } = context.repo;
                      const opts = {
@@ -172,6 +318,7 @@
                          tag_name: VERSION,
                          name: VERSION,
                          draft: false,
+                         body: RELEASE_NOTES,
                      };
                      if (FINAL == "true") {
@@ -188,3 +335,20 @@
        uses: matrix-org/matrix-js-sdk/.github/workflows/release-npm.yml@develop
        secrets:
            NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+   update-labels:
+       name: Advance release blocker labels
+       needs: release
+       runs-on: ubuntu-latest
+       steps:
+           - id: repository
+             run: echo "REPO=${GITHUB_REPOSITORY#*/}" >> $GITHUB_OUTPUT
+           - uses: garganshu/github-label-updater@3770d15ebfed2fe2cb06a241047bc340f774a7d1 # v1.0.0
+             with:
+                 owner: ${{ github.repository_owner }}
+                 repo: ${{ steps.repository.outputs.REPO }}
+                 token: ${{ secrets.GITHUB_TOKEN }}
+                 filter-labels: X-Upcoming-Release-Blocker
+                 remove-labels: X-Upcoming-Release-Blocker
+                 add-labels: X-Release-Blocker


@@ -2,13 +2,20 @@ name: Release Drafter
on:
    push:
        branches: [staging]
+   workflow_dispatch:
+       inputs:
+           previous-version:
+               description: What release to use as a base for release note purposes
+               required: false
+               type: string
concurrency: ${{ github.workflow }}
jobs:
    draft:
        runs-on: ubuntu-latest
        steps:
-           - uses: release-drafter/release-drafter@dabcf3767562210392d862070ed2ef6434b9bc6f # v5
+           - uses: release-drafter/release-drafter@e64b19c4c46173209ed9f2e5a2f4ca7de89a0e86 # v5
              env:
                  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
              with:
                  disable-autolabeler: true
+                 previous-version: ${{ inputs.previous-version }}


@@ -11,6 +11,8 @@ jobs:
        steps:
            - name: 🧮 Checkout code
              uses: actions/checkout@v4
+             with:
+                 ref: staging
            - name: 🔧 Yarn cache
              uses: actions/setup-node@v4


@@ -18,7 +18,7 @@ jobs:
        strategy:
            matrix:
                specs: [integ, unit]
-               node: [18, "*"]
+               node: [18, "lts/*", 21]
        steps:
            - name: Checkout code
              uses: actions/checkout@v4

.github/workflows/triage-labelled.yml (new file)

@@ -0,0 +1,10 @@
name: Move labelled issues to correct projects

on:
    issues:
        types: [labeled]

jobs:
    call-triage-labelled:
        uses: vector-im/element-web/.github/workflows/triage-labelled.yml@develop
        secrets: inherit


@@ -1,3 +1,9 @@
+Changes in [30.0.1](https://github.com/matrix-org/matrix-js-sdk/releases/tag/v30.0.1) (2023-11-13)
+==================================================================================================
+
+## 🐛 Bug Fixes
+
+* Ensure `setUserCreator` is called when a store is assigned ([\#3867](https://github.com/matrix-org/matrix-js-sdk/pull/3867)). Fixes vector-im/element-web#26520. Contributed by @MidhunSureshR.
+
Changes in [30.0.0](https://github.com/matrix-org/matrix-js-sdk/releases/tag/v30.0.0) (2023-11-07)
==================================================================================================


@@ -1,6 +1,6 @@
{
    "name": "matrix-js-sdk",
-   "version": "30.0.0",
+   "version": "30.0.1",
    "description": "Matrix Client-Server SDK for Javascript",
    "engines": {
        "node": ">=18.0.0"
@@ -51,7 +51,7 @@
    ],
    "dependencies": {
        "@babel/runtime": "^7.12.5",
-       "@matrix-org/matrix-sdk-crypto-wasm": "^2.2.0",
+       "@matrix-org/matrix-sdk-crypto-wasm": "^3.0.1",
        "another-json": "^0.2.0",
        "bs58": "^5.0.0",
        "content-type": "^1.0.4",


@@ -0,0 +1,88 @@
#!/usr/bin/env node

const fs = require("fs");

async function getRelease(github, dependency) {
    let owner;
    let repo;
    let tag;
    if (dependency.includes("/") && dependency.includes("@")) {
        owner = dependency.split("/")[0];
        repo = dependency.split("/")[1].split("@")[0];
        tag = dependency.split("@")[1];
    } else {
        const upstreamPackageJson = JSON.parse(fs.readFileSync(`./node_modules/${dependency}/package.json`, "utf8"));
        [owner, repo] = upstreamPackageJson.repository.url.split("/").slice(-2);
        tag = `v${upstreamPackageJson.version}`;
    }

    const response = await github.rest.repos.getReleaseByTag({
        owner,
        repo,
        tag,
    });
    return response.data;
}

const main = async ({ github, releaseId, dependencies }) => {
    const { GITHUB_REPOSITORY } = process.env;
    const [owner, repo] = GITHUB_REPOSITORY.split("/");

    const sections = new Map();
    let heading = null;
    for (const dependency of dependencies) {
        const release = await getRelease(github, dependency);
        for (const line of release.body.split("\n")) {
            if (line.startsWith("#")) {
                heading = line;
                sections.set(heading, []);
                continue;
            }
            if (heading && line) {
                sections.get(heading).push(line);
            }
        }
    }

    const { data: release } = await github.rest.repos.getRelease({
        owner,
        repo,
        release_id: releaseId,
    });

    heading = null;
    const output = [];
    for (const line of [...release.body.split("\n"), null]) {
        if (line === null || line.startsWith("#")) {
            if (heading && sections.has(heading)) {
                const lastIsBlank = !output.at(-1)?.trim();
                if (lastIsBlank) output.pop();
                output.push(...sections.get(heading));
                if (lastIsBlank) output.push("");
            }
            heading = line;
        }
        output.push(line);
    }
    return output.join("\n");
};

// This is just for testing locally
// Needs environment variables GITHUB_TOKEN & GITHUB_REPOSITORY
if (require.main === module) {
    const { Octokit } = require("@octokit/rest");
    const github = new Octokit({ auth: process.env.GITHUB_TOKEN });
    if (process.argv.length < 4) {
        // eslint-disable-next-line no-console
        console.error("Usage: node merge-release-notes.js owner/repo:release_id npm-package-name ...");
        process.exit(1);
    }
    const [releaseId, ...dependencies] = process.argv.slice(2);
    main({ github, releaseId, dependencies }).then((output) => {
        // eslint-disable-next-line no-console
        console.log(output);
    });
}

module.exports = main;
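For illustration only (not part of this commit): the script above merges notes by heading line, appending the dependency's entries under the matching heading of this repository's draft release notes. The release bodies below are invented for the example.

// Hypothetical inputs, assuming both bodies use the same "## 🐛 Bug Fixes" heading:
const draftNotes = "## 🐛 Bug Fixes\n\n* Fix A (#1)\n";
const dependencyNotes = "## 🐛 Bug Fixes\n\n* Fix B (matrix-org/example#2)\n";
// The merged output would carry both bullet points under the shared heading:
// "## 🐛 Bug Fixes\n\n* Fix A (#1)\n* Fix B (matrix-org/example#2)\n"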


@@ -315,6 +315,7 @@ export interface IMessageOpts {
    event?: boolean;
    relatesTo?: IEventRelation;
    ts?: number;
+   unsigned?: IUnsigned;
}

/**


@@ -85,9 +85,7 @@
    it("considers memberships expired when local age large", () => {
        const fakeEvent = makeMockEvent(1000);
-       const evAge = 6000;
-       fakeEvent.getLocalAge = jest.fn().mockReturnValue(evAge);
-       fakeEvent.localTimestamp = Date.now() - evAge;
+       fakeEvent.getLocalAge = jest.fn().mockReturnValue(6000);
        const membership = new CallMembership(fakeEvent, membershipTemplate);
        expect(membership.isExpired()).toEqual(true);
    });


@@ -61,7 +61,7 @@ export function mockRTCEvent(
        getSender: jest.fn().mockReturnValue("@mock:user.example"),
        getTs: jest.fn().mockReturnValue(1000),
        getLocalAge: getLocalAgeFn,
-       localTimestamp: Date.now() - getLocalAgeFn(),
+       localTimestamp: Date.now(),
        getRoomId: jest.fn().mockReturnValue(roomId),
        sender: {
            userId: "@mock:user.example",


@@ -24,7 +24,7 @@ import {
    KeysUploadRequest,
    RoomMessageRequest,
    SignatureUploadRequest,
-   SigningKeysUploadRequest,
+   UploadSigningKeysRequest,
    ToDeviceRequest,
} from "@matrix-org/matrix-sdk-crypto-wasm";
@@ -173,10 +173,10 @@ describe("OutgoingRequestProcessor", () => {
        httpBackend.verifyNoOutstandingRequests();
    });

-   it("should handle SigningKeysUploadRequests without UIA", async () => {
+   it("should handle UploadSigningKeysRequest without UIA", async () => {
        // first, mock up a request as we might expect to receive it from the Rust layer ...
        const testReq = { foo: "bar" };
-       const outgoingRequest = new SigningKeysUploadRequest(JSON.stringify(testReq));
+       const outgoingRequest = new UploadSigningKeysRequest(JSON.stringify(testReq));

        // ... then poke the request into the OutgoingRequestProcessor under test
        const reqProm = processor.makeOutgoingRequest(outgoingRequest);
@@ -200,10 +200,10 @@ describe("OutgoingRequestProcessor", () => {
        httpBackend.verifyNoOutstandingRequests();
    });

-   it("should handle SigningKeysUploadRequests with UIA", async () => {
+   it("should handle UploadSigningKeysRequest with UIA", async () => {
        // first, mock up a request as we might expect to receive it from the Rust layer ...
        const testReq = { foo: "bar" };
-       const outgoingRequest = new SigningKeysUploadRequest(JSON.stringify(testReq));
+       const outgoingRequest = new UploadSigningKeysRequest(JSON.stringify(testReq));

        // also create a UIA callback
        const authCallback: UIAuthCallback<Object> = async (makeRequest) => {


@@ -22,12 +22,15 @@ import { Room } from "../../src/models/room";
import { EventTimeline } from "../../src/models/event-timeline";
import { TimelineIndex, TimelineWindow } from "../../src/timeline-window";
import { mkMessage } from "../test-utils/test-utils";
+import { MatrixEvent } from "../../src/models/event";

const ROOM_ID = "roomId";
const USER_ID = "userId";
const mockClient = {
    getEventTimeline: jest.fn(),
    paginateEventTimeline: jest.fn(),
+   supportsThreads: jest.fn(),
+   getUserId: jest.fn().mockReturnValue(USER_ID),
} as unknown as MockedObject<MatrixClient>;

/*
@@ -64,6 +67,23 @@ function addEventsToTimeline(timeline: EventTimeline, numEvents: number, toStart
    }
}

+function createEvents(numEvents: number): Array<MatrixEvent> {
+    const ret = [];
+
+    for (let i = 0; i < numEvents; i++) {
+        ret.push(
+            mkMessage({
+                room: ROOM_ID,
+                user: USER_ID,
+                event: true,
+                unsigned: { age: 1 },
+            }),
+        );
+    }
+
+    return ret;
+}
+
/*
 * create a pair of linked timelines
 */
@@ -412,4 +432,46 @@ describe("TimelineWindow", function () {
            expect(timelineWindow.canPaginate(EventTimeline.FORWARDS)).toBe(true);
        });
    });
+
+    function idsOf(events: Array<MatrixEvent>): Array<string> {
+        return events.map((e) => (e ? e.getId() ?? "MISSING_ID" : "MISSING_EVENT"));
+    }
+
+    describe("removing events", () => {
+        it("should shorten if removing an event within the window makes it overflow", function () {
+            // Given a room with events in two timelines
+            const room = new Room(ROOM_ID, mockClient, USER_ID, { timelineSupport: true });
+            const timelineSet = room.getUnfilteredTimelineSet();
+            const liveTimeline = room.getLiveTimeline();
+            const oldTimeline = room.addTimeline();
+            liveTimeline.setNeighbouringTimeline(oldTimeline, EventTimeline.BACKWARDS);
+            oldTimeline.setNeighbouringTimeline(liveTimeline, EventTimeline.FORWARDS);
+
+            const oldEvents = createEvents(5);
+            const liveEvents = createEvents(5);
+            const [, , e3, e4, e5] = oldEvents;
+            const [, e7, e8, e9, e10] = liveEvents;
+            room.addLiveEvents(liveEvents);
+            room.addEventsToTimeline(oldEvents, true, oldTimeline);
+
+            // And 2 windows over the timelines in this room
+            const oldWindow = new TimelineWindow(mockClient, timelineSet);
+            oldWindow.load(e5.getId(), 6);
+            expect(idsOf(oldWindow.getEvents())).toEqual(idsOf([e5, e4, e3]));
+
+            const newWindow = new TimelineWindow(mockClient, timelineSet);
+            newWindow.load(e9.getId(), 4);
+            expect(idsOf(newWindow.getEvents())).toEqual(idsOf([e7, e8, e9, e10]));
+
+            // When I remove an event
+            room.removeEvent(e8.getId()!);
+
+            // Then the affected timeline is shortened (because it would have
+            // been too long with the removed event gone)
+            expect(idsOf(newWindow.getEvents())).toEqual(idsOf([e7, e9, e10]));
+
+            // And the unaffected one is not
+            expect(idsOf(oldWindow.getEvents())).toEqual(idsOf([e5, e4, e3]));
+        });
+    });
});


@@ -1217,7 +1217,7 @@ export class MatrixClient extends TypedEventEmitter<EmittedEvents, ClientEventHa
    public reEmitter = new TypedReEmitter<EmittedEvents, ClientEventHandlerMap>(this);
    public olmVersion: [number, number, number] | null = null; // populated after initCrypto
    public usingExternalCrypto = false;
-   public store: Store;
+   private _store!: Store;
    public deviceId: string | null;
    public credentials: { userId: string | null };
@@ -1332,7 +1332,6 @@ export class MatrixClient extends TypedEventEmitter<EmittedEvents, ClientEventHa
        this.usingExternalCrypto = opts.usingExternalCrypto ?? false;
        this.store = opts.store || new StubStore();
-       this.store.setUserCreator((userId) => User.createUser(userId, this));
        this.deviceId = opts.deviceId || null;
        this.sessionId = randomString(10);
@@ -1497,6 +1496,15 @@ export class MatrixClient extends TypedEventEmitter<EmittedEvents, ClientEventHa
        this._secretStorage = new ServerSideSecretStorageImpl(this, opts.cryptoCallbacks ?? {});
    }

+   public set store(newStore: Store) {
+       this._store = newStore;
+       this._store.setUserCreator((userId) => User.createUser(userId, this));
+   }
+
+   public get store(): Store {
+       return this._store;
+   }
+
    /**
     * High level helper method to begin syncing and poll for new events. To listen for these
     * events, add a listener for {@link ClientEvent.Event}
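For illustration only (not part of this commit): a hedged sketch of how the accessor above behaves from the caller's side. Because assignment now goes through the setter, a store swapped in after construction also has its user creator wired up, which is what the 30.0.1 changelog entry above refers to. The top-level MemoryStore export is assumed here.

import { createClient, MemoryStore } from "matrix-js-sdk";

const client = createClient({ baseUrl: "https://matrix.example.org" });
// Assigning a store invokes the setter, which calls setUserCreator(...) so that
// User objects created via this store are bound to the client.
client.store = new MemoryStore();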


@@ -91,7 +91,7 @@ export class CallMembership {
    }

    public isExpired(): boolean {
-       return this.getMsUntilExpiry() <= 0;
+       return this.getAbsoluteExpiry() < this.parentEvent.getTs() + this.parentEvent.getLocalAge();
    }

    public getActiveFoci(): Focus[] {
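For illustration only (not part of this commit): a minimal sketch of what the new comparison computes, assuming getAbsoluteExpiry() is the membership's creation timestamp plus its expires duration in milliseconds; membershipEvent and membership are hypothetical variables.

// "Now" is estimated as the event's origin server timestamp plus how long
// this client has held the event locally (its local age).
const estimatedNow = membershipEvent.getTs() + membershipEvent.getLocalAge();
const expired = membership.getAbsoluteExpiry() < estimatedNow;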


@@ -392,7 +392,7 @@ export class MatrixEvent extends TypedEventEmitter<MatrixEventEmittedEvents, Mat
        });
        this.txnId = event.txn_id;
-       this.localTimestamp = Date.now() - (this.getAge() ?? this.fallbackAge());
+       this.localTimestamp = Date.now() - (this.getAge() ?? 0);
        this.reEmitter = new TypedReEmitter(this);
    }
@@ -663,21 +663,6 @@
        return this.getUnsigned().age || this.event.age; // v2 / v1
    }

-   /**
-    * The fallbackAge is computed by using the origin_server_ts. So it is not adjusted
-    * to the local device clock. It should never be used.
-    * If there is no unsigned field in the event this is a better fallback then 0.
-    * It is supposed to only be used like this: `ev.getAge() ?? ev.fallbackAge()`
-    */
-   private fallbackAge(): number {
-       if (!this.getAge()) {
-           logger.warn(
-               "Age for event was not available, using `now - origin_server_ts` as a fallback. If the device clock is not correct issues might occur.",
-           );
-       }
-       return Math.max(Date.now() - this.getTs(), 0);
-   }

    /**
     * Get the age of the event when this function was called.
     * This is the 'age' field adjusted according to how long this client has
@@ -1399,7 +1384,7 @@ export class MatrixEvent extends TypedEventEmitter<MatrixEventEmittedEvents, Mat
            this.emit(MatrixEventEvent.LocalEventIdReplaced, this);
        }

-       this.localTimestamp = Date.now() - (this.getAge() ?? this.fallbackAge());
+       this.localTimestamp = Date.now() - this.getAge()!;
    }

    /**


@@ -14,12 +14,12 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

-import { OlmMachine, CrossSigningStatus } from "@matrix-org/matrix-sdk-crypto-wasm";
+import { OlmMachine, CrossSigningStatus, CrossSigningBootstrapRequests } from "@matrix-org/matrix-sdk-crypto-wasm";
import * as RustSdkCryptoJs from "@matrix-org/matrix-sdk-crypto-wasm";

import { BootstrapCrossSigningOpts } from "../crypto-api";
import { logger } from "../logger";
-import { OutgoingRequest, OutgoingRequestProcessor } from "./OutgoingRequestProcessor";
+import { OutgoingRequestProcessor } from "./OutgoingRequestProcessor";
import { UIAuthCallback } from "../interactive-auth";
import { ServerSideSecretStorage } from "../secret-storage";
@@ -118,7 +118,7 @@ export class CrossSigningIdentity {
    private async resetCrossSigning(authUploadDeviceSigningKeys?: UIAuthCallback<void>): Promise<void> {
        // XXX: We must find a way to make this atomic, currently if the user does not remember his account password
        // or 4S passphrase/key the process will fail in a bad state, with keys rotated but not uploaded or saved in 4S.
-       const outgoingRequests: Array<OutgoingRequest> = await this.olmMachine.bootstrapCrossSigning(true);
+       const outgoingRequests: CrossSigningBootstrapRequests = await this.olmMachine.bootstrapCrossSigning(true);

        // If 4S is configured we need to udpate it.
        if (await this.secretStorage.hasKey()) {
@@ -128,8 +128,14 @@ export class CrossSigningIdentity {
            await this.exportCrossSigningKeysToStorage();
        }
        logger.log("bootStrapCrossSigning: publishing keys to server");
-       for (const req of outgoingRequests) {
-           await this.outgoingRequestProcessor.makeOutgoingRequest(req, authUploadDeviceSigningKeys);
+       for (const req of [
+           outgoingRequests.uploadKeysRequest,
+           outgoingRequests.uploadSigningKeysRequest,
+           outgoingRequests.uploadSignaturesRequest,
+       ]) {
+           if (req) {
+               await this.outgoingRequestProcessor.makeOutgoingRequest(req, authUploadDeviceSigningKeys);
+           }
        }
    }


@@ -23,7 +23,7 @@ import {
    RoomMessageRequest,
    SignatureUploadRequest,
    ToDeviceRequest,
-   SigningKeysUploadRequest,
+   UploadSigningKeysRequest,
} from "@matrix-org/matrix-sdk-crypto-wasm";

import { logger } from "../logger";
@@ -62,7 +62,7 @@ export class OutgoingRequestProcessor {
    ) {}

    public async makeOutgoingRequest<T>(
-       msg: OutgoingRequest | SigningKeysUploadRequest,
+       msg: OutgoingRequest | UploadSigningKeysRequest,
        uiaCallback?: UIAuthCallback<T>,
    ): Promise<void> {
        let resp: string;
@@ -92,7 +92,7 @@ export class OutgoingRequestProcessor {
                `/_matrix/client/v3/rooms/${encodeURIComponent(msg.room_id)}/send/` +
                `${encodeURIComponent(msg.event_type)}/${encodeURIComponent(msg.txn_id)}`;
            resp = await this.rawJsonRequest(Method.Put, path, {}, msg.body);
-       } else if (msg instanceof SigningKeysUploadRequest) {
+       } else if (msg instanceof UploadSigningKeysRequest) {
            await this.makeRequestWithUIA(
                Method.Post,
                "/_matrix/client/v3/keys/device_signing/upload",


@@ -21,6 +21,7 @@ import { logger } from "./logger";
import { MatrixClient } from "./client";
import { EventTimelineSet } from "./models/event-timeline-set";
import { MatrixEvent } from "./models/event";
+import { Room, RoomEvent } from "./models/room";

/**
 * @internal
@@ -74,6 +75,10 @@ export class TimelineWindow {
 * are received from /sync; you should arrange to call {@link TimelineWindow#paginate}
 * on {@link RoomEvent.Timeline} events.
 *
+ * <p>Note that constructing an instance of this class for a room adds a
+ * listener for RoomEvent.Timeline events which is never removed. In theory
+ * this should not cause a leak since the EventEmitter uses weak mappings.
+ *
 * @param client - MatrixClient to be used for context/pagination
 * requests.
 *
@@ -87,6 +92,7 @@ export class TimelineWindow {
        opts: IOpts = {},
    ) {
        this.windowLimit = opts.windowLimit || 1000;
+       timelineSet.room?.on(RoomEvent.Timeline, this.onTimelineEvent.bind(this));
    }

    /**
@@ -193,6 +199,23 @@ export class TimelineWindow {
        return false;
    }

+   private onTimelineEvent(_event?: MatrixEvent, _room?: Room, _atStart?: boolean, removed?: boolean): void {
+       if (removed) {
+           this.onEventRemoved();
+       }
+   }
+
+   /**
+    * If an event was removed, meaning this window is longer than the timeline,
+    * shorten the window.
+    */
+   private onEventRemoved(): void {
+       const events = this.getEvents();
+       if (events.length > 0 && events[events.length - 1] === undefined && this.end) {
+           this.end.index--;
+       }
+   }
+
    /**
     * Check if this window can be extended
     *


@@ -1590,10 +1590,10 @@
    "@jridgewell/resolve-uri" "^3.1.0"
    "@jridgewell/sourcemap-codec" "^1.4.14"

-"@matrix-org/matrix-sdk-crypto-wasm@^2.2.0":
-  version "2.2.0"
-  resolved "https://registry.yarnpkg.com/@matrix-org/matrix-sdk-crypto-wasm/-/matrix-sdk-crypto-wasm-2.2.0.tgz#7c60afe01915281a6b71502821bc8e01afbfa70d"
-  integrity sha512-txmvaTiZpVV0/kWCRcE7tZvRESCEc1ynLJDVh9OUsFlaXfl13c7qdD3E6IJEJ8YiPMIn+PHogdfBZsO84reaMg==
+"@matrix-org/matrix-sdk-crypto-wasm@^3.0.1":
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/@matrix-org/matrix-sdk-crypto-wasm/-/matrix-sdk-crypto-wasm-3.0.1.tgz#56a0376f8a389264bcf4d5325b378a71f18b7664"
+  integrity sha512-r0PBfUKlLHm67+fpIV21netX5+DujbY2XjJy7JUGJ55oW4XWBNbSf9vElfaQkrdt/iDscL/8I5PoD5lCuVW6zA==

"@matrix-org/olm@3.2.15":
  version "3.2.15"