name: Benchmarking tests

on:
  workflow_dispatch:
  schedule:
    - cron: "30 1 1,15 * *" # every 2 weeks on the 1st and the 15th of every month at 1:30 AM

env:
  DIFFUSERS_IS_CI: yes
  HF_HUB_ENABLE_HF_TRANSFER: 1
  HF_HOME: /mnt/cache
  OMP_NUM_THREADS: 8
  MKL_NUM_THREADS: 8
  BASE_PATH: benchmark_outputs

jobs:
  torch_models_cuda_benchmark_tests:
    env:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL_BENCHMARK }}
    name: Torch Core Models CUDA Benchmarking Tests
    strategy:
      fail-fast: false
      max-parallel: 1
    runs-on:
      group: aws-g6e-4xlarge
    container:
      image: diffusers/diffusers-pytorch-cuda
      options: --shm-size "16gb" --ipc host --gpus all
    steps:
      - name: Checkout diffusers
        uses: actions/checkout@v3
        with:
          fetch-depth: 2
      - name: NVIDIA-SMI
        run: |
          nvidia-smi
      - name: Install dependencies
        run: |
          apt update
          apt install -y libpq-dev postgresql-client
          uv pip install -e ".[quality]"
          uv pip install -r benchmarks/requirements.txt
      - name: Environment
        run: |
          python utils/print_env.py
      - name: Diffusers Benchmarking
        env:
          HF_TOKEN: ${{ secrets.DIFFUSERS_HF_HUB_READ_TOKEN }}
        run: |
          cd benchmarks && python run_all.py
      - name: Push results to the Hub
        env:
          HF_TOKEN: ${{ secrets.DIFFUSERS_BOT_TOKEN }}
        run: |
          cd benchmarks && python push_results.py
          mkdir $BASE_PATH && cp *.csv $BASE_PATH
      - name: Test suite reports artifacts
        if: ${{ always() }}
        uses: actions/upload-artifact@v4
        with:
          name: benchmark_test_reports
          path: benchmarks/${{ env.BASE_PATH }}

      # TODO: enable this once the connection problem has been resolved.
      - name: Update benchmarking results to DB
        env:
          PGDATABASE: metrics
          PGHOST: ${{ secrets.DIFFUSERS_BENCHMARKS_PGHOST }}
          PGUSER: transformers_benchmarks
          PGPASSWORD: ${{ secrets.DIFFUSERS_BENCHMARKS_PGPASSWORD }}
          BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
        run: |
          git config --global --add safe.directory /__w/diffusers/diffusers
          commit_id=$GITHUB_SHA
          commit_msg=$(git show -s --format=%s "$commit_id" | cut -c1-70)
          cd benchmarks && python populate_into_db.py "$BRANCH_NAME" "$commit_id" "$commit_msg"
      - name: Report success status
        if: ${{ success() }}
        run: |
          pip install requests && python utils/notify_benchmarking_status.py --status=success
      - name: Report failure status
        if: ${{ failure() }}
        run: |
          pip install requests && python utils/notify_benchmarking_status.py --status=failure
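
# A minimal sketch of reproducing the benchmark steps above locally, kept as comments so
# the workflow file stays valid YAML. It assumes a CUDA machine, a checkout of the
# diffusers repo, and an HF_TOKEN in the environment; it simply mirrors the commands
# run by the "Install dependencies" and "Diffusers Benchmarking" steps:
#
#   pip install -e ".[quality]"
#   pip install -r benchmarks/requirements.txt
#   cd benchmarks && python run_all.py
#
# Results land as CSV files in the benchmarks/ directory, which the CI job copies into
# $BASE_PATH before uploading them as an artifact.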