name: Benchmarking tests

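# Runs on the cron schedule below and can also be launched manually
# (workflow_dispatch), e.g. with `gh workflow run "Benchmarking tests"`.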
on:
  workflow_dispatch:
  schedule:
    - cron: "30 1 1,15 * *" # at 1:30 AM UTC on the 1st and the 15th of every month (roughly every two weeks)

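# Environment shared by every step: CI flag, faster Hub downloads via
# hf_transfer, the Hub cache location, and CPU thread caps for OpenMP/MKL.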
env:
  DIFFUSERS_IS_CI: yes
  HF_HUB_ENABLE_HF_TRANSFER: 1
  HF_HOME: /mnt/cache
  OMP_NUM_THREADS: 8
  MKL_NUM_THREADS: 8

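# A single job benchmarks the core Torch pipelines on a CUDA GPU runner,
# inside the diffusers-pytorch-compile-cuda container.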
jobs:
  torch_pipelines_cuda_benchmark_tests:
    env:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL_BENCHMARK }}
    name: Torch Core Pipelines CUDA Benchmarking Tests
    strategy:
      fail-fast: false
      max-parallel: 1
    runs-on:
      group: aws-g6-4xlarge-plus
    container:
      image: diffusers/diffusers-pytorch-compile-cuda
      options: --shm-size "16gb" --ipc host --gpus 0
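    # Steps below: check out the repo, log the GPU, install dependencies, run
    # the benchmarks, upload the outputs, and report the final status.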
    steps:
      - name: Checkout diffusers
        uses: actions/checkout@v3
        with:
          fetch-depth: 2
      - name: NVIDIA-SMI
        run: |
          nvidia-smi
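      # Install diffusers in editable mode with the quality/test extras plus
      # the benchmark-only dependencies; transformers is pinned to 4.48.0.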
      - name: Install dependencies
        run: |
          python -m venv /opt/venv && export PATH="/opt/venv/bin:$PATH"
          python -m uv pip install -e .[quality,test]
          python -m uv pip install pandas peft
          python -m uv pip uninstall transformers && python -m uv pip install transformers==4.48.0
      - name: Environment
        run: |
          python utils/print_env.py
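      # run_all.py presumably runs the full benchmark suite and push_results.py
      # uploads the collected results; TOTAL_GPU_MEMORY is exported so the
      # scripts can read the available VRAM, and HF_TOKEN authenticates pushes.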
      - name: Diffusers Benchmarking
        env:
          HF_TOKEN: ${{ secrets.DIFFUSERS_BOT_TOKEN }}
          BASE_PATH: benchmark_outputs
        run: |
          export TOTAL_GPU_MEMORY=$(python -c "import torch; print(torch.cuda.get_device_properties(0).total_memory / (1024**3))")
          cd benchmarks && mkdir ${BASE_PATH} && python run_all.py && python push_results.py

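      # Upload whatever was written to benchmarks/benchmark_outputs as an
      # artifact, even if an earlier step failed (if: always()).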
      - name: Test suite reports artifacts
        if: ${{ always() }}
        uses: actions/upload-artifact@v4
        with:
          name: benchmark_test_reports
          path: benchmarks/benchmark_outputs

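      # Report the final run status; the notification script presumably posts
      # to the Slack webhook configured at the job level.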
      - name: Report success status
        if: ${{ success() }}
        run: |
          pip install requests && python utils/notify_benchmarking_status.py --status=success

      - name: Report failure status
        if: ${{ failure() }}
        run: |
          pip install requests && python utils/notify_benchmarking_status.py --status=failure