matrix-js-sdk (mirror of https://github.com/matrix-org/matrix-js-sdk.git)
Merge branch 'develop' into andybalaam/unrevert-deletion-move-prs
.github/actions/sign-release-tarball/action.yml (vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
name: Sign Release Tarball
description: Generates signature for release tarball and uploads it as a release asset
inputs:
    gpg-fingerprint:
        description: Fingerprint of the GPG key to use for signing the tarball.
        required: true
    upload-url:
        description: GitHub release upload URL to upload the signature file to.
        required: true
runs:
    using: composite
    steps:
        - name: Generate tarball signature
          shell: bash
          run: |
              git -c tar.tar.gz.command='gzip -cn' archive --format=tar.gz --prefix="${REPO#*/}-${VERSION#v}/" -o "/tmp/${VERSION}.tar.gz" "${VERSION}"
              gpg -u "$GPG_FINGERPRINT" --armor --output "${VERSION}.tar.gz.asc" --detach-sig "/tmp/${VERSION}.tar.gz"
              rm "/tmp/${VERSION}.tar.gz"
          env:
              GPG_FINGERPRINT: ${{ inputs.gpg-fingerprint }}
              REPO: ${{ github.repository }}

        - name: Upload tarball signature
          if: ${{ inputs.upload-url }}
          uses: shogo82148/actions-upload-release-asset@dccd6d23e64fd6a746dce6814c0bde0a04886085 # v1
          with:
              upload_url: ${{ inputs.upload-url }}
              asset_path: ${{ env.VERSION }}.tar.gz.asc
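Usage sketch (illustrative, not part of the diff itself): the composite action reads VERSION from the job environment and assumes this repository is checked out under .action-repo, which is how release-action.yml further down wires it up:

# hypothetical calling step inside a release job
- name: Generate & upload tarball signature
  if: inputs.gpg-fingerprint
  uses: ./.action-repo/.github/actions/sign-release-tarball
  with:
      gpg-fingerprint: ${{ inputs.gpg-fingerprint }}
      upload-url: ${{ steps.release.outputs.upload_url }}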
.github/actions/upload-release-assets/action.yml (vendored, new file, 41 lines)
@@ -0,0 +1,41 @@
name: Upload release assets
description: Uploads assets to an existing release and optionally signs them
inputs:
    gpg-fingerprint:
        description: Fingerprint of the GPG key to use for signing the assets, if any.
        required: false
    upload-url:
        description: GitHub release upload URL to upload the assets to.
        required: true
    asset-path:
        description: |
            The path to the asset you want to upload, if any. You can use glob patterns here.
            Will be GPG signed and an `.asc` file included in the release artifacts if `gpg-fingerprint` is set.
        required: true
runs:
    using: composite
    steps:
        - name: Sign assets
          if: inputs.gpg-fingerprint
          shell: bash
          run: |
              for FILE in $ASSET_PATH
              do
                  gpg -u "$GPG_FINGERPRINT" --armor --output "$FILE".asc --detach-sig "$FILE"
              done
          env:
              GPG_FINGERPRINT: ${{ inputs.gpg-fingerprint }}
              ASSET_PATH: ${{ inputs.asset-path }}

        - name: Upload asset signatures
          if: inputs.gpg-fingerprint
          uses: shogo82148/actions-upload-release-asset@dccd6d23e64fd6a746dce6814c0bde0a04886085 # v1
          with:
              upload_url: ${{ inputs.upload-url }}
              asset_path: ${{ inputs.asset-path }}.asc

        - name: Upload assets
          uses: shogo82148/actions-upload-release-asset@dccd6d23e64fd6a746dce6814c0bde0a04886085 # v1
          with:
              upload_url: ${{ inputs.upload-url }}
              asset_path: ${{ inputs.asset-path }}
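Usage sketch (illustrative, not part of the diff itself), mirroring the "Upload release assets & signatures" step in release-action.yml below; the shell loop signs each file matched by the glob, and the upload action expands the same glob (plus ".asc" for the signatures):

# hypothetical calling step inside a release job
- name: Upload release assets & signatures
  if: inputs.asset-path
  uses: ./.action-repo/.github/actions/upload-release-assets
  with:
      gpg-fingerprint: ${{ inputs.gpg-fingerprint }}
      upload-url: ${{ steps.release.outputs.upload_url }}
      asset-path: ${{ inputs.asset-path }}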
.github/release-drafter.yml (vendored, 5 changes)
@@ -19,6 +19,11 @@ version-resolver:
        labels:
            - "X-Breaking-Change"
    default: minor
exclude-labels:
    - "T-Task"
    - "X-Reverted"
exclude-contributors:
    - "RiotRobot"
template: |
    $CHANGES
prerelease: true
.github/workflows/cypress.yml (vendored, 3 changes)
@@ -20,7 +20,7 @@ jobs:
        # from creeping in. They take a long time to run and consume 4 concurrent runners.
        if: github.event.workflow_run.event == 'merge_group'
        uses: matrix-org/matrix-react-sdk/.github/workflows/cypress.yaml@v3.83.0-rc.1
        uses: matrix-org/matrix-react-sdk/.github/workflows/cypress.yaml@03b01b4a50d0f3fbbfa6c1a9314ef2d346d089d4
        permissions:
            actions: read
            issues: read
@@ -33,7 +33,6 @@ jobs:
            TCMS_PASSWORD: ${{ secrets.TCMS_PASSWORD }}
        with:
            react-sdk-repository: matrix-org/matrix-react-sdk
            rust-crypto: true

    # We want to make the cypress tests a required check for the merge queue.
    #
.github/workflows/release-action.yml (vendored, 192 changes)
@@ -6,6 +6,10 @@ on:
                required: true
            NPM_TOKEN:
                required: false
            GPG_PASSPHRASE:
                required: false
            GPG_PRIVATE_KEY:
                required: false
        inputs:
            final:
                description: Make final release
@@ -22,11 +26,39 @@ on:
                    `version` can be `"current"` to leave it at the current version.
                type: string
                required: false
            include-changes:
                description: Project to include changelog entries from in this release.
                type: string
                required: false
            gpg-fingerprint:
                description: Fingerprint of the GPG key to use for signing the git tag and assets, if any.
                type: string
                required: false
            asset-path:
                description: |
                    The path to the asset you want to upload, if any. You can use glob patterns here.
                    Will be GPG signed and an `.asc` file included in the release artifacts if `gpg-fingerprint` is set.
                type: string
                required: false
            expected-asset-count:
                description: The number of expected assets, including signatures, excluding generated zip & tarball.
                type: number
                required: false

jobs:
    release:
        name: Release
        runs-on: ubuntu-latest
        environment: Release
        steps:
            - name: Load GPG key
              id: gpg
              if: inputs.gpg-fingerprint
              uses: crazy-max/ghaction-import-gpg@82a020f1f7f605c65dd2449b392a52c3fcfef7ef # v6
              with:
                  gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
                  passphrase: ${{ secrets.GPG_PASSPHRASE }}
                  fingerprint: ${{ inputs.gpg-fingerprint }}

            - name: Get draft release
              id: release
              uses: cardinalby/git-get-release-action@cedef2faf69cb7c55b285bad07688d04430b7ada # v1
@@ -49,17 +81,45 @@ jobs:
                  persist-credentials: false
                  path: .action-repo
                  sparse-checkout: |
                      .github/actions
                      scripts/release

            - name: Load version
              run: echo "VERSION=$VERSION" >> $GITHUB_ENV
            - name: Prepare variables
              id: prepare
              run: |
                  echo "VERSION=$VERSION" >> $GITHUB_ENV
                  {
                      echo "RELEASE_NOTES<<EOF"
                      echo "$BODY"
                      echo "EOF"
                  } >> $GITHUB_ENV

                  HAS_DIST=0
                  jq -e .scripts.dist package.json >/dev/null 2>&1 && HAS_DIST=1
                  echo "has-dist-script=$HAS_DIST" >> $GITHUB_OUTPUT
              env:
                  BODY: ${{ steps.release.outputs.body }}
                  VERSION: ${{ steps.release.outputs.tag_name }}

            - name: Finalise version
              if: inputs.mode == 'final'
              if: inputs.final
              run: echo "VERSION=$(echo $VERSION | cut -d- -f1)" >> $GITHUB_ENV

            - name: Check version number not in use
              uses: actions/github-script@v6
              with:
                  script: |
                      const { VERSION } = process.env;
                      github.rest.repos.getReleaseByTag({
                          owner: context.repo.owner,
                          repo: context.repo.repo,
                          tag: VERSION,
                      }).then(() => {
                          core.setFailed(`Version ${VERSION} already exists`);
                      }).catch(() => {
                          // This is fine, we expect there to not be any release with this version yet
                      });

            - name: Set up git
              run: |
                  git config --global user.email "releases@riot.im"
@@ -73,8 +133,10 @@ jobs:
              run: "yarn install --frozen-lockfile"

            - name: Update dependencies
              id: update-dependencies
              if: inputs.dependencies
              run: |
                  UPDATED=()
                  while IFS= read -r DEPENDENCY; do
                      [ -z "$DEPENDENCY" ] && continue
                      IFS="=" read -r PACKAGE UPDATE_VERSION <<< "$DEPENDENCY"
@@ -98,7 +160,11 @@ jobs:
                      yarn upgrade "$PACKAGE@$UPDATE_VERSION" --exact
                      git add -u
                      git commit -m "Upgrade $PACKAGE to $UPDATE_VERSION"
                      UPDATED+=("$PACKAGE")
                  done <<< "$DEPENDENCIES"

                  JSON=$(jq --compact-output --null-input '$ARGS.positional' --args -- "${UPDATED[@]}")
                  echo "updated=$JSON" >> $GITHUB_OUTPUT
              env:
                  DEPENDENCIES: ${{ inputs.dependencies }}
@@ -113,10 +179,32 @@ jobs:
                  fi

            - name: Bump package.json version
              run: yarn version --no-git-tag-version --new-version "$VERSION"
              run: yarn version --no-git-tag-version --new-version "${VERSION#v}"

            - name: Ingest upstream changes
              if: |
                  inputs.dependencies &&
                  inputs.include-changes &&
                  contains(fromJSON(steps.update-dependencies.outputs.updated), inputs.include-changes)
              uses: actions/github-script@v6
              env:
                  RELEASE_ID: ${{ steps.release.outputs.id }}
                  DEPENDENCY: ${{ inputs.include-changes }}
              with:
                  retries: 3
                  script: |
                      const { RELEASE_ID: releaseId, DEPENDENCY, VERSION } = process.env;
                      const { owner, repo } = context.repo;
                      const script = require("./.action-repo/scripts/release/merge-release-notes.js");
                      const notes = await script({
                          github,
                          releaseId,
                          dependencies: [DEPENDENCY.replace("$VERSION", VERSION)],
                      });
                      core.exportVariable("RELEASE_NOTES", notes);

            - name: Add to CHANGELOG.md
              if: inputs.mode == 'final'
              if: inputs.final
              run: |
                  mv CHANGELOG.md CHANGELOG.md.old
                  HEADER="Changes in [${VERSION#v}](https://github.com/${{ github.repository }}/releases/tag/$VERSION) ($(date '+%Y-%m-%d'))"
@@ -125,25 +213,84 @@ jobs:
                      echo "$HEADER"
                      printf '=%.0s' $(seq ${#HEADER})
                      echo ""
                      echo "$BODY"
                      echo "$RELEASE_NOTES"
                      echo ""
                  } > CHANGELOG.md

                  cat CHANGELOG.md.old >> CHANGELOG.md
                  rm CHANGELOG.md.old
                  git add CHANGELOG.md
              env:
                  BODY: ${{ steps.release.outputs.body }}

            - name: Run pre-release script to update package.json fields
              run: |
                  ./.action-repo/scripts/release/pre-release.sh
                  git add package.json

            - name: Commit and push changes
            - name: Commit changes
              run: git commit -m "$VERSION"

            - name: Build assets
              if: steps.prepare.outputs.has-dist-script == '1'
              run: DIST_VERSION="$VERSION" yarn dist

            - name: Upload release assets & signatures
              if: inputs.asset-path
              uses: ./.action-repo/.github/actions/upload-release-assets
              with:
                  gpg-fingerprint: ${{ inputs.gpg-fingerprint }}
                  upload-url: ${{ steps.release.outputs.upload_url }}
                  asset-path: ${{ inputs.asset-path }}

            - name: Create signed tag
              if: inputs.gpg-fingerprint
              run: |
                  git commit -m "$VERSION"
                  git push origin staging
                  GIT_COMMITTER_EMAIL="$SIGNING_ID" GPG_TTY=$(tty) git tag -u "$SIGNING_ID" -m "Release $VERSION" "$VERSION"
              env:
                  SIGNING_ID: ${{ steps.gpg.outputs.email }}

            - name: Generate & upload tarball signature
              if: inputs.gpg-fingerprint
              uses: ./.action-repo/.github/actions/sign-release-tarball
              with:
                  gpg-fingerprint: ${{ inputs.gpg-fingerprint }}
                  upload-url: ${{ steps.release.outputs.upload_url }}

            # We defer pushing changes until after the release assets are built,
            # signed & uploaded to improve the atomicity of this action.
            - name: Push changes to staging
              run: |
                  git push origin staging $TAG
                  git reset --hard
              env:
                  TAG: ${{ inputs.gpg-fingerprint && env.VERSION || '' }}

            - name: Validate tarball signature
              if: inputs.gpg-fingerprint
              run: |
                  wget https://github.com/$GITHUB_REPOSITORY/archive/refs/tags/$VERSION.tar.gz
                  gpg --verify "$VERSION.tar.gz.asc" "$VERSION.tar.gz"

            - name: Validate release has expected assets
              if: inputs.expected-asset-count
              uses: actions/github-script@v6
              env:
                  RELEASE_ID: ${{ steps.release.outputs.id }}
                  EXPECTED_ASSET_COUNT: ${{ inputs.expected-asset-count }}
              with:
                  retries: 3
                  script: |
                      const { RELEASE_ID: release_id, EXPECTED_ASSET_COUNT } = process.env;
                      const { owner, repo } = context.repo;

                      const { data: release } = await github.rest.repos.getRelease({
                          owner,
                          repo,
                          release_id,
                      });

                      if (release.assets.length !== parseInt(EXPECTED_ASSET_COUNT, 10)) {
                          core.setFailed(`Found ${release.assets.length} assets but expected ${EXPECTED_ASSET_COUNT}`);
                      }

            - name: Merge to master
              if: inputs.final
@@ -154,15 +301,14 @@ jobs:

            - name: Publish release
              uses: actions/github-script@v6
              id: my-script
              env:
                  RELEASE_ID: ${{ steps.release.outputs.id }}
                  FINAL: ${{ inputs.final }}
              with:
                  result-encoding: string
                  retries: 3
                  github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
                  script: |
                      let { RELEASE_ID: release_id, VERSION, FINAL } = process.env;
                      const { RELEASE_ID: release_id, RELEASE_NOTES, VERSION, FINAL } = process.env;
                      const { owner, repo } = context.repo;

                      const opts = {
@@ -172,6 +318,7 @@ jobs:
                          tag_name: VERSION,
                          name: VERSION,
                          draft: false,
                          body: RELEASE_NOTES,
                      };

                      if (FINAL == "true") {
@@ -188,3 +335,20 @@ jobs:
        uses: matrix-org/matrix-js-sdk/.github/workflows/release-npm.yml@develop
        secrets:
            NPM_TOKEN: ${{ secrets.NPM_TOKEN }}

    update-labels:
        name: Advance release blocker labels
        needs: release
        runs-on: ubuntu-latest
        steps:
            - id: repository
              run: echo "REPO=${GITHUB_REPOSITORY#*/}" >> $GITHUB_OUTPUT

            - uses: garganshu/github-label-updater@3770d15ebfed2fe2cb06a241047bc340f774a7d1 # v1.0.0
              with:
                  owner: ${{ github.repository_owner }}
                  repo: ${{ steps.repository.outputs.REPO }}
                  token: ${{ secrets.GITHUB_TOKEN }}
                  filter-labels: X-Upcoming-Release-Blocker
                  remove-labels: X-Upcoming-Release-Blocker
                  add-labels: X-Release-Blocker
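Usage sketch (hypothetical, not part of the diff itself): a repository would consume this reusable workflow via workflow_call, in the same style as the release-npm.yml call shown above; every input value below is illustrative only:

# hypothetical caller workflow
jobs:
    release:
        uses: matrix-org/matrix-js-sdk/.github/workflows/release-action.yml@develop
        secrets: inherit
        with:
            final: true
            gpg-fingerprint: 0123456789ABCDEF # hypothetical fingerprint
            asset-path: dist/*.tgz # hypothetical glob
            expected-asset-count: 3 # hypothetical count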
.github/workflows/release-drafter.yml (vendored, 9 changes)
@@ -2,13 +2,20 @@ name: Release Drafter
on:
    push:
        branches: [staging]
    workflow_dispatch:
        inputs:
            previous-version:
                description: What release to use as a base for release note purposes
                required: false
                type: string
concurrency: ${{ github.workflow }}
jobs:
    draft:
        runs-on: ubuntu-latest
        steps:
            - uses: release-drafter/release-drafter@dabcf3767562210392d862070ed2ef6434b9bc6f # v5
            - uses: release-drafter/release-drafter@e64b19c4c46173209ed9f2e5a2f4ca7de89a0e86 # v5
              env:
                  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
              with:
                  disable-autolabeler: true
                  previous-version: ${{ inputs.previous-version }}
.github/workflows/release-npm.yml (vendored, 2 changes)
@@ -11,6 +11,8 @@ jobs:
        steps:
            - name: 🧮 Checkout code
              uses: actions/checkout@v4
              with:
                  ref: staging

            - name: 🔧 Yarn cache
              uses: actions/setup-node@v4
.github/workflows/tests.yml (vendored, 2 changes)
@@ -18,7 +18,7 @@ jobs:
        strategy:
            matrix:
                specs: [integ, unit]
                node: [18, "*"]
                node: [18, "lts/*", 21]
        steps:
            - name: Checkout code
              uses: actions/checkout@v4
.github/workflows/triage-labelled.yml (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
name: Move labelled issues to correct projects

on:
    issues:
        types: [labeled]

jobs:
    call-triage-labelled:
        uses: vector-im/element-web/.github/workflows/triage-labelled.yml@develop
        secrets: inherit
@@ -1,3 +1,9 @@
Changes in [30.0.1](https://github.com/matrix-org/matrix-js-sdk/releases/tag/v30.0.1) (2023-11-13)
==================================================================================================

## 🐛 Bug Fixes
* Ensure `setUserCreator` is called when a store is assigned ([\#3867](https://github.com/matrix-org/matrix-js-sdk/pull/3867)). Fixes vector-im/element-web#26520. Contributed by @MidhunSureshR.

Changes in [30.0.0](https://github.com/matrix-org/matrix-js-sdk/releases/tag/v30.0.0) (2023-11-07)
==================================================================================================
@@ -1,6 +1,6 @@
{
    "name": "matrix-js-sdk",
    "version": "30.0.0",
    "version": "30.0.1",
    "description": "Matrix Client-Server SDK for Javascript",
    "engines": {
        "node": ">=18.0.0"
@@ -51,7 +51,7 @@
    ],
    "dependencies": {
        "@babel/runtime": "^7.12.5",
        "@matrix-org/matrix-sdk-crypto-wasm": "^2.2.0",
        "@matrix-org/matrix-sdk-crypto-wasm": "^3.0.1",
        "another-json": "^0.2.0",
        "bs58": "^5.0.0",
        "content-type": "^1.0.4",
scripts/release/merge-release-notes.js (new executable file, 88 lines)
@@ -0,0 +1,88 @@
#!/usr/bin/env node

const fs = require("fs");

async function getRelease(github, dependency) {
    let owner;
    let repo;
    let tag;
    if (dependency.includes("/") && dependency.includes("@")) {
        owner = dependency.split("/")[0];
        repo = dependency.split("/")[1].split("@")[0];
        tag = dependency.split("@")[1];
    } else {
        const upstreamPackageJson = JSON.parse(fs.readFileSync(`./node_modules/${dependency}/package.json`, "utf8"));
        [owner, repo] = upstreamPackageJson.repository.url.split("/").slice(-2);
        tag = `v${upstreamPackageJson.version}`;
    }

    const response = await github.rest.repos.getReleaseByTag({
        owner,
        repo,
        tag,
    });
    return response.data;
}

const main = async ({ github, releaseId, dependencies }) => {
    const { GITHUB_REPOSITORY } = process.env;
    const [owner, repo] = GITHUB_REPOSITORY.split("/");

    const sections = new Map();
    let heading = null;
    for (const dependency of dependencies) {
        const release = await getRelease(github, dependency);
        for (const line of release.body.split("\n")) {
            if (line.startsWith("#")) {
                heading = line;
                sections.set(heading, []);
                continue;
            }
            if (heading && line) {
                sections.get(heading).push(line);
            }
        }
    }

    const { data: release } = await github.rest.repos.getRelease({
        owner,
        repo,
        release_id: releaseId,
    });

    heading = null;
    const output = [];
    for (const line of [...release.body.split("\n"), null]) {
        if (line === null || line.startsWith("#")) {
            if (heading && sections.has(heading)) {
                const lastIsBlank = !output.at(-1)?.trim();
                if (lastIsBlank) output.pop();
                output.push(...sections.get(heading));
                if (lastIsBlank) output.push("");
            }
            heading = line;
        }
        output.push(line);
    }

    return output.join("\n");
};

// This is just for testing locally
// Needs environment variables GITHUB_TOKEN & GITHUB_REPOSITORY
if (require.main === module) {
    const { Octokit } = require("@octokit/rest");
    const github = new Octokit({ auth: process.env.GITHUB_TOKEN });
    if (process.argv.length < 4) {
        // eslint-disable-next-line no-console
        console.error("Usage: node merge-release-notes.js owner/repo:release_id npm-package-name ...");
        process.exit(1);
    }
    const [releaseId, ...dependencies] = process.argv.slice(2);
    main({ github, releaseId, dependencies }).then((output) => {
        // eslint-disable-next-line no-console
        console.log(output);
    });
}

module.exports = main;
@@ -315,6 +315,7 @@ export interface IMessageOpts {
    event?: boolean;
    relatesTo?: IEventRelation;
    ts?: number;
    unsigned?: IUnsigned;
}

/**
@@ -85,9 +85,7 @@ describe("CallMembership", () => {

    it("considers memberships expired when local age large", () => {
        const fakeEvent = makeMockEvent(1000);
        const evAge = 6000;
        fakeEvent.getLocalAge = jest.fn().mockReturnValue(evAge);
        fakeEvent.localTimestamp = Date.now() - evAge;
        fakeEvent.getLocalAge = jest.fn().mockReturnValue(6000);
        const membership = new CallMembership(fakeEvent, membershipTemplate);
        expect(membership.isExpired()).toEqual(true);
    });
@@ -61,7 +61,7 @@ export function mockRTCEvent(
        getSender: jest.fn().mockReturnValue("@mock:user.example"),
        getTs: jest.fn().mockReturnValue(1000),
        getLocalAge: getLocalAgeFn,
        localTimestamp: Date.now() - getLocalAgeFn(),
        localTimestamp: Date.now(),
        getRoomId: jest.fn().mockReturnValue(roomId),
        sender: {
            userId: "@mock:user.example",
@@ -24,7 +24,7 @@ import {
    KeysUploadRequest,
    RoomMessageRequest,
    SignatureUploadRequest,
    SigningKeysUploadRequest,
    UploadSigningKeysRequest,
    ToDeviceRequest,
} from "@matrix-org/matrix-sdk-crypto-wasm";

@@ -173,10 +173,10 @@ describe("OutgoingRequestProcessor", () => {
        httpBackend.verifyNoOutstandingRequests();
    });

    it("should handle SigningKeysUploadRequests without UIA", async () => {
    it("should handle UploadSigningKeysRequest without UIA", async () => {
        // first, mock up a request as we might expect to receive it from the Rust layer ...
        const testReq = { foo: "bar" };
        const outgoingRequest = new SigningKeysUploadRequest(JSON.stringify(testReq));
        const outgoingRequest = new UploadSigningKeysRequest(JSON.stringify(testReq));

        // ... then poke the request into the OutgoingRequestProcessor under test
        const reqProm = processor.makeOutgoingRequest(outgoingRequest);
@@ -200,10 +200,10 @@ describe("OutgoingRequestProcessor", () => {
        httpBackend.verifyNoOutstandingRequests();
    });

    it("should handle SigningKeysUploadRequests with UIA", async () => {
    it("should handle UploadSigningKeysRequest with UIA", async () => {
        // first, mock up a request as we might expect to receive it from the Rust layer ...
        const testReq = { foo: "bar" };
        const outgoingRequest = new SigningKeysUploadRequest(JSON.stringify(testReq));
        const outgoingRequest = new UploadSigningKeysRequest(JSON.stringify(testReq));

        // also create a UIA callback
        const authCallback: UIAuthCallback<Object> = async (makeRequest) => {
@@ -22,12 +22,15 @@ import { Room } from "../../src/models/room";
import { EventTimeline } from "../../src/models/event-timeline";
import { TimelineIndex, TimelineWindow } from "../../src/timeline-window";
import { mkMessage } from "../test-utils/test-utils";
import { MatrixEvent } from "../../src/models/event";

const ROOM_ID = "roomId";
const USER_ID = "userId";
const mockClient = {
    getEventTimeline: jest.fn(),
    paginateEventTimeline: jest.fn(),
    supportsThreads: jest.fn(),
    getUserId: jest.fn().mockReturnValue(USER_ID),
} as unknown as MockedObject<MatrixClient>;

/*
@@ -64,6 +67,23 @@ function addEventsToTimeline(timeline: EventTimeline, numEvents: number, toStart
    }
}

function createEvents(numEvents: number): Array<MatrixEvent> {
    const ret = [];

    for (let i = 0; i < numEvents; i++) {
        ret.push(
            mkMessage({
                room: ROOM_ID,
                user: USER_ID,
                event: true,
                unsigned: { age: 1 },
            }),
        );
    }

    return ret;
}

/*
 * create a pair of linked timelines
 */
@@ -412,4 +432,46 @@ describe("TimelineWindow", function () {
            expect(timelineWindow.canPaginate(EventTimeline.FORWARDS)).toBe(true);
        });
    });

    function idsOf(events: Array<MatrixEvent>): Array<string> {
        return events.map((e) => (e ? e.getId() ?? "MISSING_ID" : "MISSING_EVENT"));
    }

    describe("removing events", () => {
        it("should shorten if removing an event within the window makes it overflow", function () {
            // Given a room with events in two timelines
            const room = new Room(ROOM_ID, mockClient, USER_ID, { timelineSupport: true });
            const timelineSet = room.getUnfilteredTimelineSet();
            const liveTimeline = room.getLiveTimeline();
            const oldTimeline = room.addTimeline();
            liveTimeline.setNeighbouringTimeline(oldTimeline, EventTimeline.BACKWARDS);
            oldTimeline.setNeighbouringTimeline(liveTimeline, EventTimeline.FORWARDS);

            const oldEvents = createEvents(5);
            const liveEvents = createEvents(5);
            const [, , e3, e4, e5] = oldEvents;
            const [, e7, e8, e9, e10] = liveEvents;
            room.addLiveEvents(liveEvents);
            room.addEventsToTimeline(oldEvents, true, oldTimeline);

            // And 2 windows over the timelines in this room
            const oldWindow = new TimelineWindow(mockClient, timelineSet);
            oldWindow.load(e5.getId(), 6);
            expect(idsOf(oldWindow.getEvents())).toEqual(idsOf([e5, e4, e3]));

            const newWindow = new TimelineWindow(mockClient, timelineSet);
            newWindow.load(e9.getId(), 4);
            expect(idsOf(newWindow.getEvents())).toEqual(idsOf([e7, e8, e9, e10]));

            // When I remove an event
            room.removeEvent(e8.getId()!);

            // Then the affected timeline is shortened (because it would have
            // been too long with the removed event gone)
            expect(idsOf(newWindow.getEvents())).toEqual(idsOf([e7, e9, e10]));

            // And the unaffected one is not
            expect(idsOf(oldWindow.getEvents())).toEqual(idsOf([e5, e4, e3]));
        });
    });
});
@@ -1217,7 +1217,7 @@ export class MatrixClient extends TypedEventEmitter<EmittedEvents, ClientEventHa
    public reEmitter = new TypedReEmitter<EmittedEvents, ClientEventHandlerMap>(this);
    public olmVersion: [number, number, number] | null = null; // populated after initCrypto
    public usingExternalCrypto = false;
    public store: Store;
    private _store!: Store;
    public deviceId: string | null;
    public credentials: { userId: string | null };

@@ -1332,7 +1332,6 @@ export class MatrixClient extends TypedEventEmitter<EmittedEvents, ClientEventHa

        this.usingExternalCrypto = opts.usingExternalCrypto ?? false;
        this.store = opts.store || new StubStore();
        this.store.setUserCreator((userId) => User.createUser(userId, this));
        this.deviceId = opts.deviceId || null;
        this.sessionId = randomString(10);

@@ -1497,6 +1496,15 @@ export class MatrixClient extends TypedEventEmitter<EmittedEvents, ClientEventHa
        this._secretStorage = new ServerSideSecretStorageImpl(this, opts.cryptoCallbacks ?? {});
    }

    public set store(newStore: Store) {
        this._store = newStore;
        this._store.setUserCreator((userId) => User.createUser(userId, this));
    }

    public get store(): Store {
        return this._store;
    }

    /**
     * High level helper method to begin syncing and poll for new events. To listen for these
     * events, add a listener for {@link ClientEvent.Event}
@@ -91,7 +91,7 @@ export class CallMembership {
    }

    public isExpired(): boolean {
        return this.getMsUntilExpiry() <= 0;
        return this.getAbsoluteExpiry() < this.parentEvent.getTs() + this.parentEvent.getLocalAge();
    }

    public getActiveFoci(): Focus[] {
@@ -392,7 +392,7 @@ export class MatrixEvent extends TypedEventEmitter<MatrixEventEmittedEvents, Mat
        });

        this.txnId = event.txn_id;
        this.localTimestamp = Date.now() - (this.getAge() ?? this.fallbackAge());
        this.localTimestamp = Date.now() - (this.getAge() ?? 0);
        this.reEmitter = new TypedReEmitter(this);
    }

@@ -663,21 +663,6 @@ export class MatrixEvent extends TypedEventEmitter<MatrixEventEmittedEvents, Mat
        return this.getUnsigned().age || this.event.age; // v2 / v1
    }

    /**
     * The fallbackAge is computed by using the origin_server_ts. So it is not adjusted
     * to the local device clock. It should never be used.
     * If there is no unsigned field in the event this is a better fallback then 0.
     * It is supposed to only be used like this: `ev.getAge() ?? ev.fallbackAge()`
     */
    private fallbackAge(): number {
        if (!this.getAge()) {
            logger.warn(
                "Age for event was not available, using `now - origin_server_ts` as a fallback. If the device clock is not correct issues might occur.",
            );
        }
        return Math.max(Date.now() - this.getTs(), 0);
    }

    /**
     * Get the age of the event when this function was called.
     * This is the 'age' field adjusted according to how long this client has
@@ -1399,7 +1384,7 @@ export class MatrixEvent extends TypedEventEmitter<MatrixEventEmittedEvents, Mat
            this.emit(MatrixEventEvent.LocalEventIdReplaced, this);
        }

        this.localTimestamp = Date.now() - (this.getAge() ?? this.fallbackAge());
        this.localTimestamp = Date.now() - this.getAge()!;
    }

    /**
@@ -14,12 +14,12 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

import { OlmMachine, CrossSigningStatus } from "@matrix-org/matrix-sdk-crypto-wasm";
import { OlmMachine, CrossSigningStatus, CrossSigningBootstrapRequests } from "@matrix-org/matrix-sdk-crypto-wasm";
import * as RustSdkCryptoJs from "@matrix-org/matrix-sdk-crypto-wasm";

import { BootstrapCrossSigningOpts } from "../crypto-api";
import { logger } from "../logger";
import { OutgoingRequest, OutgoingRequestProcessor } from "./OutgoingRequestProcessor";
import { OutgoingRequestProcessor } from "./OutgoingRequestProcessor";
import { UIAuthCallback } from "../interactive-auth";
import { ServerSideSecretStorage } from "../secret-storage";

@@ -118,7 +118,7 @@ export class CrossSigningIdentity {
    private async resetCrossSigning(authUploadDeviceSigningKeys?: UIAuthCallback<void>): Promise<void> {
        // XXX: We must find a way to make this atomic, currently if the user does not remember his account password
        // or 4S passphrase/key the process will fail in a bad state, with keys rotated but not uploaded or saved in 4S.
        const outgoingRequests: Array<OutgoingRequest> = await this.olmMachine.bootstrapCrossSigning(true);
        const outgoingRequests: CrossSigningBootstrapRequests = await this.olmMachine.bootstrapCrossSigning(true);

        // If 4S is configured we need to udpate it.
        if (await this.secretStorage.hasKey()) {
@@ -128,8 +128,14 @@ export class CrossSigningIdentity {
            await this.exportCrossSigningKeysToStorage();
        }
        logger.log("bootStrapCrossSigning: publishing keys to server");
        for (const req of outgoingRequests) {
            await this.outgoingRequestProcessor.makeOutgoingRequest(req, authUploadDeviceSigningKeys);
        for (const req of [
            outgoingRequests.uploadKeysRequest,
            outgoingRequests.uploadSigningKeysRequest,
            outgoingRequests.uploadSignaturesRequest,
        ]) {
            if (req) {
                await this.outgoingRequestProcessor.makeOutgoingRequest(req, authUploadDeviceSigningKeys);
            }
        }
    }
@@ -23,7 +23,7 @@ import {
    RoomMessageRequest,
    SignatureUploadRequest,
    ToDeviceRequest,
    SigningKeysUploadRequest,
    UploadSigningKeysRequest,
} from "@matrix-org/matrix-sdk-crypto-wasm";

import { logger } from "../logger";
@@ -62,7 +62,7 @@ export class OutgoingRequestProcessor {
    ) {}

    public async makeOutgoingRequest<T>(
        msg: OutgoingRequest | SigningKeysUploadRequest,
        msg: OutgoingRequest | UploadSigningKeysRequest,
        uiaCallback?: UIAuthCallback<T>,
    ): Promise<void> {
        let resp: string;
@@ -92,7 +92,7 @@ export class OutgoingRequestProcessor {
                `/_matrix/client/v3/rooms/${encodeURIComponent(msg.room_id)}/send/` +
                `${encodeURIComponent(msg.event_type)}/${encodeURIComponent(msg.txn_id)}`;
            resp = await this.rawJsonRequest(Method.Put, path, {}, msg.body);
        } else if (msg instanceof SigningKeysUploadRequest) {
        } else if (msg instanceof UploadSigningKeysRequest) {
            await this.makeRequestWithUIA(
                Method.Post,
                "/_matrix/client/v3/keys/device_signing/upload",
@@ -21,6 +21,7 @@ import { logger } from "./logger";
import { MatrixClient } from "./client";
import { EventTimelineSet } from "./models/event-timeline-set";
import { MatrixEvent } from "./models/event";
import { Room, RoomEvent } from "./models/room";

/**
 * @internal
@@ -74,6 +75,10 @@ export class TimelineWindow {
     * are received from /sync; you should arrange to call {@link TimelineWindow#paginate}
     * on {@link RoomEvent.Timeline} events.
     *
     * <p>Note that constructing an instance of this class for a room adds a
     * listener for RoomEvent.Timeline events which is never removed. In theory
     * this should not cause a leak since the EventEmitter uses weak mappings.
     *
     * @param client - MatrixClient to be used for context/pagination
     * requests.
     *
@@ -87,6 +92,7 @@ export class TimelineWindow {
        opts: IOpts = {},
    ) {
        this.windowLimit = opts.windowLimit || 1000;
        timelineSet.room?.on(RoomEvent.Timeline, this.onTimelineEvent.bind(this));
    }

    /**
@@ -193,6 +199,23 @@ export class TimelineWindow {
        return false;
    }

    private onTimelineEvent(_event?: MatrixEvent, _room?: Room, _atStart?: boolean, removed?: boolean): void {
        if (removed) {
            this.onEventRemoved();
        }
    }

    /**
     * If an event was removed, meaning this window is longer than the timeline,
     * shorten the window.
     */
    private onEventRemoved(): void {
        const events = this.getEvents();
        if (events.length > 0 && events[events.length - 1] === undefined && this.end) {
            this.end.index--;
        }
    }

    /**
     * Check if this window can be extended
     *
@@ -1590,10 +1590,10 @@
    "@jridgewell/resolve-uri" "^3.1.0"
    "@jridgewell/sourcemap-codec" "^1.4.14"

"@matrix-org/matrix-sdk-crypto-wasm@^2.2.0":
  version "2.2.0"
  resolved "https://registry.yarnpkg.com/@matrix-org/matrix-sdk-crypto-wasm/-/matrix-sdk-crypto-wasm-2.2.0.tgz#7c60afe01915281a6b71502821bc8e01afbfa70d"
  integrity sha512-txmvaTiZpVV0/kWCRcE7tZvRESCEc1ynLJDVh9OUsFlaXfl13c7qdD3E6IJEJ8YiPMIn+PHogdfBZsO84reaMg==
"@matrix-org/matrix-sdk-crypto-wasm@^3.0.1":
  version "3.0.1"
  resolved "https://registry.yarnpkg.com/@matrix-org/matrix-sdk-crypto-wasm/-/matrix-sdk-crypto-wasm-3.0.1.tgz#56a0376f8a389264bcf4d5325b378a71f18b7664"
  integrity sha512-r0PBfUKlLHm67+fpIV21netX5+DujbY2XjJy7JUGJ55oW4XWBNbSf9vElfaQkrdt/iDscL/8I5PoD5lCuVW6zA==

"@matrix-org/olm@3.2.15":
  version "3.2.15"